Schema (113 columns):

| column | dtype |
|---|---|
| `hexsha` | string |
| `size` | int64 |
| `ext` | string |
| `lang` | string |
| `max_stars_repo_path` | string |
| `max_stars_repo_name` | string |
| `max_stars_repo_head_hexsha` | string |
| `max_stars_repo_licenses` | list |
| `max_stars_count` | int64 |
| `max_stars_repo_stars_event_min_datetime` | string |
| `max_stars_repo_stars_event_max_datetime` | string |
| `max_issues_repo_path` | string |
| `max_issues_repo_name` | string |
| `max_issues_repo_head_hexsha` | string |
| `max_issues_repo_licenses` | list |
| `max_issues_count` | int64 |
| `max_issues_repo_issues_event_min_datetime` | string |
| `max_issues_repo_issues_event_max_datetime` | string |
| `max_forks_repo_path` | string |
| `max_forks_repo_name` | string |
| `max_forks_repo_head_hexsha` | string |
| `max_forks_repo_licenses` | list |
| `max_forks_count` | int64 |
| `max_forks_repo_forks_event_min_datetime` | string |
| `max_forks_repo_forks_event_max_datetime` | string |
| `content` | string |
| `avg_line_length` | float64 |
| `max_line_length` | int64 |
| `alphanum_fraction` | float64 |
| `qsc_code_num_words_quality_signal` | int64 |
| `qsc_code_num_chars_quality_signal` | float64 |
| `qsc_code_mean_word_length_quality_signal` | float64 |
| `qsc_code_frac_words_unique_quality_signal` | float64 |
| `qsc_code_frac_chars_top_2grams_quality_signal` | float64 |
| `qsc_code_frac_chars_top_3grams_quality_signal` | float64 |
| `qsc_code_frac_chars_top_4grams_quality_signal` | float64 |
| `qsc_code_frac_chars_dupe_5grams_quality_signal` | float64 |
| `qsc_code_frac_chars_dupe_6grams_quality_signal` | float64 |
| `qsc_code_frac_chars_dupe_7grams_quality_signal` | float64 |
| `qsc_code_frac_chars_dupe_8grams_quality_signal` | float64 |
| `qsc_code_frac_chars_dupe_9grams_quality_signal` | float64 |
| `qsc_code_frac_chars_dupe_10grams_quality_signal` | float64 |
| `qsc_code_frac_chars_replacement_symbols_quality_signal` | float64 |
| `qsc_code_frac_chars_digital_quality_signal` | float64 |
| `qsc_code_frac_chars_whitespace_quality_signal` | float64 |
| `qsc_code_size_file_byte_quality_signal` | float64 |
| `qsc_code_num_lines_quality_signal` | float64 |
| `qsc_code_num_chars_line_max_quality_signal` | float64 |
| `qsc_code_num_chars_line_mean_quality_signal` | float64 |
| `qsc_code_frac_chars_alphabet_quality_signal` | float64 |
| `qsc_code_frac_chars_comments_quality_signal` | float64 |
| `qsc_code_cate_xml_start_quality_signal` | float64 |
| `qsc_code_frac_lines_dupe_lines_quality_signal` | float64 |
| `qsc_code_cate_autogen_quality_signal` | float64 |
| `qsc_code_frac_lines_long_string_quality_signal` | float64 |
| `qsc_code_frac_chars_string_length_quality_signal` | float64 |
| `qsc_code_frac_chars_long_word_length_quality_signal` | float64 |
| `qsc_code_frac_lines_string_concat_quality_signal` | float64 |
| `qsc_code_cate_encoded_data_quality_signal` | float64 |
| `qsc_code_frac_chars_hex_words_quality_signal` | float64 |
| `qsc_code_frac_lines_prompt_comments_quality_signal` | float64 |
| `qsc_code_frac_lines_assert_quality_signal` | float64 |
| `qsc_codepython_cate_ast_quality_signal` | float64 |
| `qsc_codepython_frac_lines_func_ratio_quality_signal` | float64 |
| `qsc_codepython_cate_var_zero_quality_signal` | bool |
| `qsc_codepython_frac_lines_pass_quality_signal` | float64 |
| `qsc_codepython_frac_lines_import_quality_signal` | float64 |
| `qsc_codepython_frac_lines_simplefunc_quality_signal` | float64 |
| `qsc_codepython_score_lines_no_logic_quality_signal` | float64 |
| `qsc_codepython_frac_lines_print_quality_signal` | float64 |
| `qsc_code_num_words` | int64 |
| `qsc_code_num_chars` | int64 |
| `qsc_code_mean_word_length` | int64 |
| `qsc_code_frac_words_unique` | null |
| `qsc_code_frac_chars_top_2grams` | int64 |
| `qsc_code_frac_chars_top_3grams` | int64 |
| `qsc_code_frac_chars_top_4grams` | int64 |
| `qsc_code_frac_chars_dupe_5grams` | int64 |
| `qsc_code_frac_chars_dupe_6grams` | int64 |
| `qsc_code_frac_chars_dupe_7grams` | int64 |
| `qsc_code_frac_chars_dupe_8grams` | int64 |
| `qsc_code_frac_chars_dupe_9grams` | int64 |
| `qsc_code_frac_chars_dupe_10grams` | int64 |
| `qsc_code_frac_chars_replacement_symbols` | int64 |
| `qsc_code_frac_chars_digital` | int64 |
| `qsc_code_frac_chars_whitespace` | int64 |
| `qsc_code_size_file_byte` | int64 |
| `qsc_code_num_lines` | int64 |
| `qsc_code_num_chars_line_max` | int64 |
| `qsc_code_num_chars_line_mean` | int64 |
| `qsc_code_frac_chars_alphabet` | int64 |
| `qsc_code_frac_chars_comments` | int64 |
| `qsc_code_cate_xml_start` | int64 |
| `qsc_code_frac_lines_dupe_lines` | int64 |
| `qsc_code_cate_autogen` | int64 |
| `qsc_code_frac_lines_long_string` | int64 |
| `qsc_code_frac_chars_string_length` | int64 |
| `qsc_code_frac_chars_long_word_length` | int64 |
| `qsc_code_frac_lines_string_concat` | null |
| `qsc_code_cate_encoded_data` | int64 |
| `qsc_code_frac_chars_hex_words` | int64 |
| `qsc_code_frac_lines_prompt_comments` | int64 |
| `qsc_code_frac_lines_assert` | int64 |
| `qsc_codepython_cate_ast` | int64 |
| `qsc_codepython_frac_lines_func_ratio` | int64 |
| `qsc_codepython_cate_var_zero` | int64 |
| `qsc_codepython_frac_lines_pass` | int64 |
| `qsc_codepython_frac_lines_import` | int64 |
| `qsc_codepython_frac_lines_simplefunc` | int64 |
| `qsc_codepython_score_lines_no_logic` | int64 |
| `qsc_codepython_frac_lines_print` | int64 |
| `effective` | string |
| `hits` | int64 |
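A minimal sketch of filtering rows on these columns. The file name `sample.parquet` is a hypothetical local export (the storage location is not given here), and the reading of `qsc_code_cate_autogen_quality_signal == 0` as "not flagged as auto-generated" and `qsc_codepython_cate_ast_quality_signal == 1` as "parses cleanly" is inferred from the column names, not documented:

```python
import pandas as pd

# Hypothetical local export of this split; adjust the path as needed.
df = pd.read_parquet("sample.parquet")

# Keep Python rows that the heuristics (presumably) did not flag as
# auto-generated and that (presumably) parse as valid Python.
mask = (
    (df["lang"] == "Python")
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
    & (df["qsc_codepython_cate_ast_quality_signal"] == 1)
)
print(df.loc[mask, ["max_stars_repo_name", "size", "avg_line_length"]].head())
```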
**Example 1**

| field | value |
|---|---|
| `hexsha` | e683d34545ca8711a15916d84cb42a58410b9f71 |
| `size` | 281 |
| `ext` | py |
| `lang` | Python |
| `max_stars/issues/forks_repo_path` | `test_finder.py` |
| `max_stars/issues/forks_repo_name` | `akanksha234/Python-Interview-Questions` |
| `max_stars/issues/forks_repo_head_hexsha` | 431913628fbef0c9e503cf1915136fae21e8b023 |
| `max_stars/issues/forks_repo_licenses` | `["MIT"]` |
| `max_stars_count` / `max_issues_count` / `max_forks_count` | null |
| stars/issues/forks event min/max datetimes | null |

`content`:

```python
from find_missing_element import finder
def test_1():
assert(finder([5, 5, 7, 7], [5, 7, 7])== 5)
def test_2():
assert(finder([1, 2, 3, 4, 5, 6, 7], [3, 7, 2, 1, 4, 6])== 5)
def test_3():
assert(finder([9, 8, 7, 6, 5, 4, 3, 2, 1], [9, 8, 7, 5, 4, 3, 2, 1])== 6)
```

Derived statistics and quality signals for example 1:

| column | value |
|---|---|
| `avg_line_length` | 35.125 |
| `max_line_length` | 77 |
| `alphanum_fraction` | 0.494662 |
| `qsc_code_num_words_quality_signal` | 61 |
| `qsc_code_num_chars_quality_signal` | 281 |
| `qsc_code_mean_word_length_quality_signal` | 2.196721 |
| `qsc_code_frac_words_unique_quality_signal` | 0.295082 |
| `qsc_code_frac_chars_top_2grams_quality_signal` | 0.156716 |
| `qsc_code_frac_chars_top_3grams_quality_signal` | 0.044776 |
| `qsc_code_frac_chars_top_4grams_quality_signal` | 0.059701 |
| `qsc_code_frac_chars_dupe_5grams_quality_signal` | 0.074627 |
| `qsc_code_frac_chars_dupe_6grams_quality_signal` | 0 |
| `qsc_code_frac_chars_dupe_7grams_quality_signal` | 0 |
| `qsc_code_frac_chars_dupe_8grams_quality_signal` | 0 |
| `qsc_code_frac_chars_dupe_9grams_quality_signal` | 0 |
| `qsc_code_frac_chars_dupe_10grams_quality_signal` | 0 |
| `qsc_code_frac_chars_replacement_symbols_quality_signal` | 0 |
| `qsc_code_frac_chars_digital_quality_signal` | 0.20283 |
| `qsc_code_frac_chars_whitespace_quality_signal` | 0.245552 |
| `qsc_code_size_file_byte_quality_signal` | 281 |
| `qsc_code_num_lines_quality_signal` | 8 |
| `qsc_code_num_chars_line_max_quality_signal` | 77 |
| `qsc_code_num_chars_line_mean_quality_signal` | 35.125 |
| `qsc_code_frac_chars_alphabet_quality_signal` | 0.429245 |
| `qsc_code_frac_chars_comments_quality_signal` | 0 |
| `qsc_code_cate_xml_start_quality_signal` | 0 |
| `qsc_code_frac_lines_dupe_lines_quality_signal` | 0 |
| `qsc_code_cate_autogen_quality_signal` | 0 |
| `qsc_code_frac_lines_long_string_quality_signal` | 0 |
| `qsc_code_frac_chars_string_length_quality_signal` | 0 |
| `qsc_code_frac_chars_long_word_length_quality_signal` | 0 |
| `qsc_code_frac_lines_string_concat_quality_signal` | 0 |
| `qsc_code_cate_encoded_data_quality_signal` | 0 |
| `qsc_code_frac_chars_hex_words_quality_signal` | 0 |
| `qsc_code_frac_lines_prompt_comments_quality_signal` | 0 |
| `qsc_code_frac_lines_assert_quality_signal` | 0.428571 |
| `qsc_codepython_cate_ast_quality_signal` | 1 |
| `qsc_codepython_frac_lines_func_ratio_quality_signal` | 0.428571 |
| `qsc_codepython_cate_var_zero_quality_signal` | true |
| `qsc_codepython_frac_lines_pass_quality_signal` | 0 |
| `qsc_codepython_frac_lines_import_quality_signal` | 0.142857 |
| `qsc_codepython_frac_lines_simplefunc_quality_signal` | 0 |
| `qsc_codepython_score_lines_no_logic_quality_signal` | 0.571429 |
| `qsc_codepython_frac_lines_print_quality_signal` | 0 |
| `qsc_code_num_words` | 0 |
| `qsc_code_num_chars` | 0 |
| `qsc_code_mean_word_length` | 0 |
| `qsc_code_frac_words_unique` | null |
| `qsc_code_frac_chars_top_2grams` | 0 |
| `qsc_code_frac_chars_top_3grams` | 0 |
| `qsc_code_frac_chars_top_4grams` | 0 |
| `qsc_code_frac_chars_dupe_5grams` | 0 |
| `qsc_code_frac_chars_dupe_6grams` | 0 |
| `qsc_code_frac_chars_dupe_7grams` | 0 |
| `qsc_code_frac_chars_dupe_8grams` | 0 |
| `qsc_code_frac_chars_dupe_9grams` | 0 |
| `qsc_code_frac_chars_dupe_10grams` | 0 |
| `qsc_code_frac_chars_replacement_symbols` | 0 |
| `qsc_code_frac_chars_digital` | 1 |
| `qsc_code_frac_chars_whitespace` | 0 |
| `qsc_code_size_file_byte` | 0 |
| `qsc_code_num_lines` | 1 |
| `qsc_code_num_chars_line_max` | 0 |
| `qsc_code_num_chars_line_mean` | 0 |
| `qsc_code_frac_chars_alphabet` | 1 |
| `qsc_code_frac_chars_comments` | 0 |
| `qsc_code_cate_xml_start` | 0 |
| `qsc_code_frac_lines_dupe_lines` | 0 |
| `qsc_code_cate_autogen` | 0 |
| `qsc_code_frac_lines_long_string` | 0 |
| `qsc_code_frac_chars_string_length` | 0 |
| `qsc_code_frac_chars_long_word_length` | 0 |
| `qsc_code_frac_lines_string_concat` | null |
| `qsc_code_cate_encoded_data` | 0 |
| `qsc_code_frac_chars_hex_words` | 0 |
| `qsc_code_frac_lines_prompt_comments` | 0 |
| `qsc_code_frac_lines_assert` | 1 |
| `qsc_codepython_cate_ast` | 0 |
| `qsc_codepython_frac_lines_func_ratio` | 1 |
| `qsc_codepython_cate_var_zero` | 1 |
| `qsc_codepython_frac_lines_pass` | 0 |
| `qsc_codepython_frac_lines_import` | 0 |
| `qsc_codepython_frac_lines_simplefunc` | 0 |
| `qsc_codepython_score_lines_no_logic` | 0 |
| `qsc_codepython_frac_lines_print` | 0 |
| `effective` | 0 |
| `hits` | 6 |
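The imported `find_missing_element` module is not part of this row, but the assertions pin down its contract: `finder(arr1, arr2)` returns the single element of `arr1` that is missing from `arr2`, with duplicates respected. A minimal sketch that satisfies all three tests (one possible implementation, not the repository's actual one):

```python
from collections import Counter


def finder(arr1, arr2):
    # Multiset difference: exactly one element of arr1 is absent from
    # (or under-represented in) arr2; return it.
    missing = Counter(arr1) - Counter(arr2)
    return next(iter(missing))


assert finder([5, 5, 7, 7], [5, 7, 7]) == 5
```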
**Example 2**

| field | value |
|---|---|
| `hexsha` | e688cbdec7b801aa6b862d176813286e25591d55 |
| `size` | 211 |
| `ext` | py |
| `lang` | Python |
| `max_stars/issues/forks_repo_path` | `worstcase/__init__.py` |
| `max_stars/issues/forks_repo_name` | `amosborne/worstcase` |
| `max_stars/issues/forks_repo_head_hexsha` | 4a5f3cd907a6fab303607d14a6ad2a0a73db3955 |
| `max_stars/issues/forks_repo_licenses` | `["MIT"]` |
| `max_stars_count` / `max_issues_count` / `max_forks_count` | null |
| stars/issues/forks event min/max datetimes | null |

`content`:

```python
from .worstcase import Config as config
from .worstcase import Derivative as derive
from .worstcase import Parameter as param
from .worstcase import Unit as unit
__all__ = ["config", "param", "derive", "unit"]
```

Derived statistics and quality signals for example 2:

| column | value |
|---|---|
| `avg_line_length` | 30.142857 |
| `max_line_length` | 47 |
| `alphanum_fraction` | 0.767773 |
| `qsc_code_num_words_quality_signal` | 29 |
| `qsc_code_num_chars_quality_signal` | 211 |
| `qsc_code_mean_word_length_quality_signal` | 5.448276 |
| `qsc_code_frac_words_unique_quality_signal` | 0.37931 |
| `qsc_code_frac_chars_top_2grams_quality_signal` | 0.329114 |
| `qsc_code_frac_chars_top_3grams_quality_signal` | 0.481013 |
| `qsc_code_frac_chars_top_4grams_quality_signal` | 0 |
| `qsc_code_frac_chars_dupe_5grams_quality_signal` | 0 |
| `qsc_code_frac_chars_dupe_6grams_quality_signal` | 0 |
| `qsc_code_frac_chars_dupe_7grams_quality_signal` | 0 |
| `qsc_code_frac_chars_dupe_8grams_quality_signal` | 0 |
| `qsc_code_frac_chars_dupe_9grams_quality_signal` | 0 |
| `qsc_code_frac_chars_dupe_10grams_quality_signal` | 0 |
| `qsc_code_frac_chars_replacement_symbols_quality_signal` | 0 |
| `qsc_code_frac_chars_digital_quality_signal` | 0 |
| `qsc_code_frac_chars_whitespace_quality_signal` | 0.146919 |
| `qsc_code_size_file_byte_quality_signal` | 211 |
| `qsc_code_num_lines_quality_signal` | 6 |
| `qsc_code_num_chars_line_max_quality_signal` | 48 |
| `qsc_code_num_chars_line_mean_quality_signal` | 35.166667 |
| `qsc_code_frac_chars_alphabet_quality_signal` | 0.877778 |
| `qsc_code_frac_chars_comments_quality_signal` | 0 |
| `qsc_code_cate_xml_start_quality_signal` | 0 |
| `qsc_code_frac_lines_dupe_lines_quality_signal` | 0 |
| `qsc_code_cate_autogen_quality_signal` | 0 |
| `qsc_code_frac_lines_long_string_quality_signal` | 0 |
| `qsc_code_frac_chars_string_length_quality_signal` | 0.099526 |
| `qsc_code_frac_chars_long_word_length_quality_signal` | 0 |
| `qsc_code_frac_lines_string_concat_quality_signal` | 0 |
| `qsc_code_cate_encoded_data_quality_signal` | 0 |
| `qsc_code_frac_chars_hex_words_quality_signal` | 0 |
| `qsc_code_frac_lines_prompt_comments_quality_signal` | 0 |
| `qsc_code_frac_lines_assert_quality_signal` | 0 |
| `qsc_codepython_cate_ast_quality_signal` | 1 |
| `qsc_codepython_frac_lines_func_ratio_quality_signal` | 0 |
| `qsc_codepython_cate_var_zero_quality_signal` | false |
| `qsc_codepython_frac_lines_pass_quality_signal` | 0 |
| `qsc_codepython_frac_lines_import_quality_signal` | 0.8 |
| `qsc_codepython_frac_lines_simplefunc_quality_signal` | 0 |
| `qsc_codepython_score_lines_no_logic_quality_signal` | 0.8 |
| `qsc_codepython_frac_lines_print_quality_signal` | 0 |
| `qsc_code_num_words` | 1 |
| `qsc_code_num_chars` | 0 |
| `qsc_code_mean_word_length` | 0 |
| `qsc_code_frac_words_unique` | null |
| `qsc_code_frac_chars_top_2grams` | 1 |
| `qsc_code_frac_chars_top_3grams` | 1 |
| `qsc_code_frac_chars_top_4grams` | 0 |
| `qsc_code_frac_chars_dupe_5grams` | 0 |
| `qsc_code_frac_chars_dupe_6grams` | 0 |
| `qsc_code_frac_chars_dupe_7grams` | 0 |
| `qsc_code_frac_chars_dupe_8grams` | 0 |
| `qsc_code_frac_chars_dupe_9grams` | 0 |
| `qsc_code_frac_chars_dupe_10grams` | 0 |
| `qsc_code_frac_chars_replacement_symbols` | 0 |
| `qsc_code_frac_chars_digital` | 0 |
| `qsc_code_frac_chars_whitespace` | 0 |
| `qsc_code_size_file_byte` | 0 |
| `qsc_code_num_lines` | 1 |
| `qsc_code_num_chars_line_max` | 0 |
| `qsc_code_num_chars_line_mean` | 0 |
| `qsc_code_frac_chars_alphabet` | 0 |
| `qsc_code_frac_chars_comments` | 0 |
| `qsc_code_cate_xml_start` | 0 |
| `qsc_code_frac_lines_dupe_lines` | 0 |
| `qsc_code_cate_autogen` | 0 |
| `qsc_code_frac_lines_long_string` | 0 |
| `qsc_code_frac_chars_string_length` | 0 |
| `qsc_code_frac_chars_long_word_length` | 0 |
| `qsc_code_frac_lines_string_concat` | null |
| `qsc_code_cate_encoded_data` | 0 |
| `qsc_code_frac_chars_hex_words` | 0 |
| `qsc_code_frac_lines_prompt_comments` | 0 |
| `qsc_code_frac_lines_assert` | 0 |
| `qsc_codepython_cate_ast` | 0 |
| `qsc_codepython_frac_lines_func_ratio` | 0 |
| `qsc_codepython_cate_var_zero` | 0 |
| `qsc_codepython_frac_lines_pass` | 0 |
| `qsc_codepython_frac_lines_import` | 1 |
| `qsc_codepython_frac_lines_simplefunc` | 0 |
| `qsc_codepython_score_lines_no_logic` | 1 |
| `qsc_codepython_frac_lines_print` | 0 |
| `effective` | 0 |
| `hits` | 6 |
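This `__init__.py` only re-exports the submodule's classes under shorter public names. A quick sanity check of what those aliases imply, using only standard import mechanics (a sketch assuming the installed package matches the file above, not the package's documented API):

```python
import worstcase

# `from .worstcase import Parameter as param` binds the submodule's
# class at package level; the submodule itself is also reachable as
# an attribute once the package has imported it.
assert worstcase.param is worstcase.worstcase.Parameter
assert worstcase.derive is worstcase.worstcase.Derivative

# __all__ limits `from worstcase import *` to exactly these four names.
assert sorted(worstcase.__all__) == ["config", "derive", "param", "unit"]
```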
**Example 3**

| field | value |
|---|---|
| `hexsha` | e6a511da7a4190ba3e92fe6af24cc2bd40bd899b |
| `size` | 70,616 |
| `ext` | py |
| `lang` | Python |
| `max_stars/issues/forks_repo_path` | `bungieapi/generated/components/schemas/__init__.py` |
| `max_stars/issues/forks_repo_name` | `itemmanager/bungieapi` |
| `max_stars/issues/forks_repo_head_hexsha` | 0c4326f88ea0f28a1dcab683dc08c8d21c940fc1 |
| `max_stars/issues/forks_repo_licenses` | `["MIT"]` |
| `max_stars_count` | 5 |
| `max_stars_repo_stars_event_min_datetime` | 2022-01-06T21:05:53.000Z |
| `max_stars_repo_stars_event_max_datetime` | 2022-02-12T19:58:11.000Z |
| `max_issues_count` | 8 |
| `max_issues_repo_issues_event_min_datetime` | 2021-12-25T02:40:56.000Z |
| `max_issues_repo_issues_event_max_datetime` | 2022-03-28T03:31:41.000Z |
| `max_forks_count` | 1 |
| `max_forks_repo_forks_event_min_datetime` | 2022-01-30T23:53:25.000Z |
| `max_forks_repo_forks_event_max_datetime` | 2022-01-30T23:53:25.000Z |

`content` (the preview cuts off partway through this 70,616-byte generated module):

```python
# generated by update to not change manually
import dataclasses as dt
import typing as t
from enum import Enum
from bungieapi.json import to_json
class BungieMembershipType(Enum):
"""The types of membership the Accounts system supports.
This is the external facing enum used in place of the internal-only
Bungie.SharedDefinitions.MembershipType.
"""
NONE = 0
TIGER_XBOX = 1
TIGER_PSN = 2
TIGER_STEAM = 3
TIGER_BLIZZARD = 4
TIGER_STADIA = 5
TIGER_DEMON = 10
BUNGIE_NEXT = 254
ALL = (
-1
) # "All" is only valid for searching capabilities: you need to pass the actual matching BungieMembershipType for any query where you pass a known membershipId.
class BungieCredentialType(Enum):
"""The types of credentials the Accounts system supports.
This is the external facing enum used in place of the internal-only
Bungie.SharedDefinitions.CredentialType.
"""
NONE = 0
XUID = 1
PSNID = 2
WLID = 3
FAKE = 4
FACEBOOK = 5
GOOGLE = 8
WINDOWS = 9
DEMON_ID = 10
STEAM_ID = 12
BATTLE_NET_ID = 14
STADIA_ID = 16
TWITCH_ID = 18
@dt.dataclass(frozen=True)
class SearchResultOfContentItemPublicContract:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["ContentItemPublicContract"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If False, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward- compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
@dt.dataclass(frozen=True)
class SearchResultOfPostResponse:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["PostResponse"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If False, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward- compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
BungieMembershipTypeArray = t.Sequence["BungieMembershipType"]
@dt.dataclass(frozen=True)
class SearchResultOfGroupV2Card:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["GroupV2Card"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If False, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward- compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
@dt.dataclass(frozen=True)
class SearchResultOfGroupMember:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["GroupMember"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If False, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward- compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
@dt.dataclass(frozen=True)
class SearchResultOfGroupBan:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["GroupBan"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If False, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward- compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
@dt.dataclass(frozen=True)
class SearchResultOfGroupMemberApplication:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["GroupMemberApplication"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If False, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward- compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
@dt.dataclass(frozen=True)
class SearchResultOfGroupMembership:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["GroupMembership"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If False, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward- compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
@dt.dataclass(frozen=True)
class SearchResultOfGroupPotentialMembership:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["GroupPotentialMembership"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If False, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward- compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyVendorReceiptsComponent:
data: "DestinyVendorReceiptsComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyInventoryComponent:
data: "DestinyInventoryComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyProfileComponent:
data: "DestinyProfileComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyPlatformSilverComponent:
data: "DestinyPlatformSilverComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyKiosksComponent:
data: "DestinyKiosksComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyPlugSetsComponent:
data: "DestinyPlugSetsComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyProfileProgressionComponent:
data: "DestinyProfileProgressionComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyPresentationNodesComponent:
data: "DestinyPresentationNodesComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyProfileRecordsComponent:
data: "DestinyProfileRecordsComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyProfileCollectiblesComponent:
data: "DestinyProfileCollectiblesComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyProfileTransitoryComponent:
data: "DestinyProfileTransitoryComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyMetricsComponent:
data: "DestinyMetricsComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyStringVariablesComponent:
data: "DestinyStringVariablesComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyCharacterComponent:
data: t.Mapping[str, "DestinyCharacterComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyInventoryComponent:
data: t.Mapping[str, "DestinyInventoryComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyCharacterProgressionComponent:
data: t.Mapping[str, "DestinyCharacterProgressionComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyCharacterRenderComponent:
data: t.Mapping[str, "DestinyCharacterRenderComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyCharacterActivitiesComponent:
data: t.Mapping[str, "DestinyCharacterActivitiesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyKiosksComponent:
data: t.Mapping[str, "DestinyKiosksComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyPlugSetsComponent:
data: t.Mapping[str, "DestinyPlugSetsComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DestinyBaseItemComponentSetOfuint32:
objectives: "DictionaryComponentResponseOfuint32AndDestinyItemObjectivesComponent"
perks: "DictionaryComponentResponseOfuint32AndDestinyItemPerksComponent"
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"objectives": to_json(self.objectives),
"perks": to_json(self.perks),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyItemObjectivesComponent:
data: t.Mapping[str, "DestinyItemObjectivesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyItemPerksComponent:
data: t.Mapping[str, "DestinyItemPerksComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyPresentationNodesComponent:
data: t.Mapping[str, "DestinyPresentationNodesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyCharacterRecordsComponent:
data: t.Mapping[str, "DestinyCharacterRecordsComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyCollectiblesComponent:
data: t.Mapping[str, "DestinyCollectiblesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyStringVariablesComponent:
data: t.Mapping[str, "DestinyStringVariablesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyCraftablesComponent:
data: t.Mapping[str, "DestinyCraftablesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DestinyBaseItemComponentSetOfint64:
objectives: "DictionaryComponentResponseOfint64AndDestinyItemObjectivesComponent"
perks: "DictionaryComponentResponseOfint64AndDestinyItemPerksComponent"
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"objectives": to_json(self.objectives),
"perks": to_json(self.perks),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyItemObjectivesComponent:
data: t.Mapping[str, "DestinyItemObjectivesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyItemPerksComponent:
data: t.Mapping[str, "DestinyItemPerksComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DestinyItemComponentSetOfint64:
instances: "DictionaryComponentResponseOfint64AndDestinyItemInstanceComponent"
objectives: "DictionaryComponentResponseOfint64AndDestinyItemObjectivesComponent"
perks: "DictionaryComponentResponseOfint64AndDestinyItemPerksComponent"
plug_objectives: "DictionaryComponentResponseOfint64AndDestinyItemPlugObjectivesComponent"
plug_states: "DictionaryComponentResponseOfuint32AndDestinyItemPlugComponent"
render_data: "DictionaryComponentResponseOfint64AndDestinyItemRenderComponent"
reusable_plugs: "DictionaryComponentResponseOfint64AndDestinyItemReusablePlugsComponent"
sockets: "DictionaryComponentResponseOfint64AndDestinyItemSocketsComponent"
stats: "DictionaryComponentResponseOfint64AndDestinyItemStatsComponent"
talent_grids: "DictionaryComponentResponseOfint64AndDestinyItemTalentGridComponent"
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"instances": to_json(self.instances),
"renderData": to_json(self.render_data),
"stats": to_json(self.stats),
"sockets": to_json(self.sockets),
"reusablePlugs": to_json(self.reusable_plugs),
"plugObjectives": to_json(self.plug_objectives),
"talentGrids": to_json(self.talent_grids),
"plugStates": to_json(self.plug_states),
"objectives": to_json(self.objectives),
"perks": to_json(self.perks),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyItemInstanceComponent:
data: t.Mapping[str, "DestinyItemInstanceComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyItemRenderComponent:
data: t.Mapping[str, "DestinyItemRenderComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyItemStatsComponent:
data: t.Mapping[str, "DestinyItemStatsComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyItemSocketsComponent:
data: t.Mapping[str, "DestinyItemSocketsComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyItemReusablePlugsComponent:
data: t.Mapping[str, "DestinyItemReusablePlugsComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyItemPlugObjectivesComponent:
data: t.Mapping[str, "DestinyItemPlugObjectivesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyItemTalentGridComponent:
data: t.Mapping[str, "DestinyItemTalentGridComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyItemPlugComponent:
data: t.Mapping[str, "DestinyItemPlugComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint64AndDestinyCurrenciesComponent:
data: t.Mapping[str, "DestinyCurrenciesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyCharacterComponent:
data: "DestinyCharacterComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyCharacterProgressionComponent:
data: "DestinyCharacterProgressionComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyCharacterRenderComponent:
data: "DestinyCharacterRenderComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyCharacterActivitiesComponent:
data: "DestinyCharacterActivitiesComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyCharacterRecordsComponent:
data: "DestinyCharacterRecordsComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyCollectiblesComponent:
data: "DestinyCollectiblesComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyCurrenciesComponent:
data: "DestinyCurrenciesComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyItemComponent:
data: "DestinyItemComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyItemInstanceComponent:
data: "DestinyItemInstanceComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyItemObjectivesComponent:
data: "DestinyItemObjectivesComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyItemPerksComponent:
data: "DestinyItemPerksComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyItemRenderComponent:
data: "DestinyItemRenderComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyItemStatsComponent:
data: "DestinyItemStatsComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyItemTalentGridComponent:
data: "DestinyItemTalentGridComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyItemSocketsComponent:
data: "DestinyItemSocketsComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyItemReusablePlugsComponent:
data: "DestinyItemReusablePlugsComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyItemPlugObjectivesComponent:
data: "DestinyItemPlugObjectivesComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyVendorGroupComponent:
data: "DestinyVendorGroupComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyVendorComponent:
data: t.Mapping[str, "DestinyVendorComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyVendorCategoriesComponent:
data: t.Mapping[str, "DestinyVendorCategoriesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DestinyVendorSaleItemSetComponentOfDestinyVendorSaleItemComponent:
sale_items: t.Mapping[str, "DestinyVendorSaleItemComponent"]
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"saleItems": to_json(self.sale_items),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndPersonalDestinyVendorSaleItemSetComponent:
data: t.Mapping[str, "PersonalDestinyVendorSaleItemSetComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DestinyBaseItemComponentSetOfint32:
objectives: "DictionaryComponentResponseOfint32AndDestinyItemObjectivesComponent"
perks: "DictionaryComponentResponseOfint32AndDestinyItemPerksComponent"
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"objectives": to_json(self.objectives),
"perks": to_json(self.perks),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint32AndDestinyItemObjectivesComponent:
data: t.Mapping[str, "DestinyItemObjectivesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint32AndDestinyItemPerksComponent:
data: t.Mapping[str, "DestinyItemPerksComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DestinyItemComponentSetOfint32:
instances: "DictionaryComponentResponseOfint32AndDestinyItemInstanceComponent"
objectives: "DictionaryComponentResponseOfint32AndDestinyItemObjectivesComponent"
perks: "DictionaryComponentResponseOfint32AndDestinyItemPerksComponent"
plug_objectives: "DictionaryComponentResponseOfint32AndDestinyItemPlugObjectivesComponent"
plug_states: "DictionaryComponentResponseOfuint32AndDestinyItemPlugComponent"
render_data: "DictionaryComponentResponseOfint32AndDestinyItemRenderComponent"
reusable_plugs: "DictionaryComponentResponseOfint32AndDestinyItemReusablePlugsComponent"
sockets: "DictionaryComponentResponseOfint32AndDestinyItemSocketsComponent"
stats: "DictionaryComponentResponseOfint32AndDestinyItemStatsComponent"
talent_grids: "DictionaryComponentResponseOfint32AndDestinyItemTalentGridComponent"
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"instances": to_json(self.instances),
"renderData": to_json(self.render_data),
"stats": to_json(self.stats),
"sockets": to_json(self.sockets),
"reusablePlugs": to_json(self.reusable_plugs),
"plugObjectives": to_json(self.plug_objectives),
"talentGrids": to_json(self.talent_grids),
"plugStates": to_json(self.plug_states),
"objectives": to_json(self.objectives),
"perks": to_json(self.perks),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint32AndDestinyItemInstanceComponent:
data: t.Mapping[str, "DestinyItemInstanceComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint32AndDestinyItemRenderComponent:
data: t.Mapping[str, "DestinyItemRenderComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint32AndDestinyItemStatsComponent:
data: t.Mapping[str, "DestinyItemStatsComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint32AndDestinyItemSocketsComponent:
data: t.Mapping[str, "DestinyItemSocketsComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint32AndDestinyItemReusablePlugsComponent:
data: t.Mapping[str, "DestinyItemReusablePlugsComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint32AndDestinyItemPlugObjectivesComponent:
data: t.Mapping[str, "DestinyItemPlugObjectivesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint32AndDestinyItemTalentGridComponent:
data: t.Mapping[str, "DestinyItemTalentGridComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyVendorComponent:
data: "DestinyVendorComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SingleComponentResponseOfDestinyVendorCategoriesComponent:
data: "DestinyVendorCategoriesComponent"
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfint32AndDestinyVendorSaleItemComponent:
data: t.Mapping[str, "DestinyVendorSaleItemComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyPublicVendorComponent:
data: t.Mapping[str, "DestinyPublicVendorComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DestinyVendorSaleItemSetComponentOfDestinyPublicVendorSaleItemComponent:
sale_items: t.Mapping[str, "DestinyPublicVendorSaleItemComponent"]
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"saleItems": to_json(self.sale_items),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndPublicDestinyVendorSaleItemSetComponent:
data: t.Mapping[str, "PublicDestinyVendorSaleItemSetComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DestinyItemComponentSetOfuint32:
instances: "DictionaryComponentResponseOfuint32AndDestinyItemInstanceComponent"
objectives: "DictionaryComponentResponseOfuint32AndDestinyItemObjectivesComponent"
perks: "DictionaryComponentResponseOfuint32AndDestinyItemPerksComponent"
plug_objectives: "DictionaryComponentResponseOfuint32AndDestinyItemPlugObjectivesComponent"
plug_states: "DictionaryComponentResponseOfuint32AndDestinyItemPlugComponent"
render_data: "DictionaryComponentResponseOfuint32AndDestinyItemRenderComponent"
reusable_plugs: "DictionaryComponentResponseOfuint32AndDestinyItemReusablePlugsComponent"
sockets: "DictionaryComponentResponseOfuint32AndDestinyItemSocketsComponent"
stats: "DictionaryComponentResponseOfuint32AndDestinyItemStatsComponent"
talent_grids: "DictionaryComponentResponseOfuint32AndDestinyItemTalentGridComponent"
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"instances": to_json(self.instances),
"renderData": to_json(self.render_data),
"stats": to_json(self.stats),
"sockets": to_json(self.sockets),
"reusablePlugs": to_json(self.reusable_plugs),
"plugObjectives": to_json(self.plug_objectives),
"talentGrids": to_json(self.talent_grids),
"plugStates": to_json(self.plug_states),
"objectives": to_json(self.objectives),
"perks": to_json(self.perks),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyItemInstanceComponent:
data: t.Mapping[str, "DestinyItemInstanceComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyItemRenderComponent:
data: t.Mapping[str, "DestinyItemRenderComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyItemStatsComponent:
data: t.Mapping[str, "DestinyItemStatsComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyItemSocketsComponent:
data: t.Mapping[str, "DestinyItemSocketsComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyItemReusablePlugsComponent:
data: t.Mapping[str, "DestinyItemReusablePlugsComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyItemPlugObjectivesComponent:
data: t.Mapping[str, "DestinyItemPlugObjectivesComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class DictionaryComponentResponseOfuint32AndDestinyItemTalentGridComponent:
data: t.Mapping[str, "DestinyItemTalentGridComponent"]
privacy: "ComponentPrivacySetting"
disabled: t.Optional[bool] = dt.field(
default=None, metadata={"description": "If true, this component is disabled."}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"data": to_json(self.data),
"privacy": to_json(self.privacy),
"disabled": to_json(self.disabled),
}
@dt.dataclass(frozen=True)
class SearchResultOfDestinyEntitySearchResultItem:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["DestinyEntitySearchResultItem"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If false, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward-compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
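# The field description above boils down to: page on hasMore, never on
# totalResults. A minimal consumption sketch (fetch_page is a hypothetical
# callable returning a SearchResultOfDestinyEntitySearchResultItem for a
# zero-based page index):
#
#     items, page = [], 0
#     while True:
#         result = fetch_page(page)
#         items.extend(result.results)
#         if not result.has_more:
#             break
#         page += 1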
@dt.dataclass(frozen=True)
class SearchResultOfTrendingEntry:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["TrendingEntry"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If false, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward-compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
@dt.dataclass(frozen=True)
class SearchResultOfFireteamSummary:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["FireteamSummary"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If false, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward-compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
@dt.dataclass(frozen=True)
class SearchResultOfFireteamResponse:
has_more: bool
query: "PagedQuery"
replacement_continuation_token: str
results: t.Sequence["FireteamResponse"]
total_results: int
use_total_results: bool = dt.field(
metadata={
"description": """If useTotalResults is true, then totalResults represents an accurate count.
If false, it does not, and may be estimated/only the size of the current page.
Either way, you should probably always only trust hasMore.
This is a long-held historical throwback to when we used to do paging with known total results. Those queries toasted our database, and we were left to hastily alter our endpoints and create backward-compatible shims, of which useTotalResults is one."""
}
)
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"results": to_json(self.results),
"totalResults": to_json(self.total_results),
"hasMore": to_json(self.has_more),
"query": to_json(self.query),
"replacementContinuationToken": to_json(
self.replacement_continuation_token
),
"useTotalResults": to_json(self.use_total_results),
}
@dt.dataclass(frozen=True)
class GlobalAlert:
alert_html: str
alert_key: str
alert_level: "GlobalAlertLevel"
alert_link: str
alert_timestamp: str
alert_type: "GlobalAlertType"
stream_info: "StreamInfo"
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"AlertKey": to_json(self.alert_key),
"AlertHtml": to_json(self.alert_html),
"AlertTimestamp": to_json(self.alert_timestamp),
"AlertLink": to_json(self.alert_link),
"AlertLevel": to_json(self.alert_level),
"AlertType": to_json(self.alert_type),
"StreamInfo": to_json(self.stream_info),
}
class GlobalAlertLevel(Enum):
UNKNOWN = 0
BLUE = 1
YELLOW = 2
RED = 3
class GlobalAlertType(Enum):
GLOBAL_ALERT = 0
STREAMING_ALERT = 1
@dt.dataclass(frozen=True)
class StreamInfo:
channel_name: str
def to_json(self) -> t.Mapping[str, t.Any]:
return {
"ChannelName": to_json(self.channel_name),
}
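# Example (a sketch, assuming the module-level to_json helper passes plain
# strings through unchanged): the to_json methods map snake_case attributes
# back to the API's original key casing, e.g.
#
#     StreamInfo(channel_name="bungie").to_json()
#     # -> {"ChannelName": "bungie"}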
# imported at the end to avoid circular imports for type annotations
from bungieapi.generated.components.schemas.components import (  # noqa: E402
    ComponentPrivacySetting,
)
from bungieapi.generated.components.schemas.content import ( # noqa: E402
ContentItemPublicContract,
)
from bungieapi.generated.components.schemas.destiny.components.collectibles import ( # noqa: E402
DestinyCollectiblesComponent,
DestinyProfileCollectiblesComponent,
)
from bungieapi.generated.components.schemas.destiny.components.craftables import ( # noqa: E402
DestinyCraftablesComponent,
)
from bungieapi.generated.components.schemas.destiny.components.inventory import ( # noqa: E402
DestinyCurrenciesComponent,
DestinyPlatformSilverComponent,
)
from bungieapi.generated.components.schemas.destiny.components.items import ( # noqa: E402
DestinyItemPlugComponent,
DestinyItemPlugObjectivesComponent,
DestinyItemReusablePlugsComponent,
)
from bungieapi.generated.components.schemas.destiny.components.kiosks import ( # noqa: E402
DestinyKiosksComponent,
)
from bungieapi.generated.components.schemas.destiny.components.metrics import ( # noqa: E402
DestinyMetricsComponent,
)
from bungieapi.generated.components.schemas.destiny.components.plug_sets import ( # noqa: E402
DestinyPlugSetsComponent,
)
from bungieapi.generated.components.schemas.destiny.components.presentation import ( # noqa: E402
DestinyPresentationNodesComponent,
)
from bungieapi.generated.components.schemas.destiny.components.profiles import ( # noqa: E402
DestinyProfileProgressionComponent,
DestinyProfileTransitoryComponent,
)
from bungieapi.generated.components.schemas.destiny.components.records import ( # noqa: E402
DestinyCharacterRecordsComponent,
DestinyProfileRecordsComponent,
)
from bungieapi.generated.components.schemas.destiny.components.string_variables import ( # noqa: E402
DestinyStringVariablesComponent,
)
from bungieapi.generated.components.schemas.destiny.components.vendors import ( # noqa: E402
DestinyPublicVendorComponent,
DestinyPublicVendorSaleItemComponent,
DestinyVendorGroupComponent,
)
from bungieapi.generated.components.schemas.destiny.definitions import ( # noqa: E402
DestinyEntitySearchResultItem,
)
from bungieapi.generated.components.schemas.destiny.entities.characters import ( # noqa: E402
DestinyCharacterActivitiesComponent,
DestinyCharacterComponent,
DestinyCharacterProgressionComponent,
DestinyCharacterRenderComponent,
)
from bungieapi.generated.components.schemas.destiny.entities.inventory import ( # noqa: E402
DestinyInventoryComponent,
)
from bungieapi.generated.components.schemas.destiny.entities.items import ( # noqa: E402
DestinyItemComponent,
DestinyItemInstanceComponent,
DestinyItemObjectivesComponent,
DestinyItemPerksComponent,
DestinyItemRenderComponent,
DestinyItemSocketsComponent,
DestinyItemStatsComponent,
DestinyItemTalentGridComponent,
)
from bungieapi.generated.components.schemas.destiny.entities.profiles import ( # noqa: E402
DestinyProfileComponent,
DestinyVendorReceiptsComponent,
)
from bungieapi.generated.components.schemas.destiny.entities.vendors import ( # noqa: E402
DestinyVendorCategoriesComponent,
DestinyVendorComponent,
DestinyVendorSaleItemComponent,
)
from bungieapi.generated.components.schemas.destiny.responses import ( # noqa: E402
PersonalDestinyVendorSaleItemSetComponent,
PublicDestinyVendorSaleItemSetComponent,
)
from bungieapi.generated.components.schemas.fireteam import ( # noqa: E402
FireteamResponse,
FireteamSummary,
)
from bungieapi.generated.components.schemas.forum import PostResponse # noqa: E402
from bungieapi.generated.components.schemas.groups_v2 import GroupBan # noqa: E402
from bungieapi.generated.components.schemas.groups_v2 import GroupMember # noqa: E402
from bungieapi.generated.components.schemas.groups_v2 import GroupV2Card # noqa: E402
from bungieapi.generated.components.schemas.groups_v2 import ( # noqa: E402
GroupMemberApplication,
GroupMembership,
GroupPotentialMembership,
)
from bungieapi.generated.components.schemas.queries import PagedQuery # noqa: E402
from bungieapi.generated.components.schemas.trending import TrendingEntry # noqa: E402
----- [record boundary: per-file quality-signal columns omitted] -----
repixelator/cmd.py | yclee126/RePixelator @ 7bef8f6f667964fff4c244065f4375c94ccf1b2d | Python | 165 bytes | MIT | hexsha fc62779da8250b9f8530c89236dee1e3e82f7bfc
import sys
def cmd():
args = sys.argv[1:]
from .repixelator import main
main(args)
def gui():
from .repixelator_gui import main
main()
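# cmd() and gui() look like console-script entry points. A minimal setup.py
# sketch wiring them up (an assumption for illustration; the real project may
# declare these in setup.cfg or pyproject.toml instead):
#
#     from setuptools import setup, find_packages
#
#     setup(
#         name="repixelator",
#         packages=find_packages(),
#         entry_points={
#             "console_scripts": [
#                 "repixelator = repixelator.cmd:cmd",      # command-line entry
#                 "repixelator-gui = repixelator.cmd:gui",  # GUI entry
#             ]
#         },
#     )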
----- [record boundary: per-file quality-signal columns omitted] -----
vid_seg/__init__.py | YutingZhang/zcode @ 79f4a990298ccf21b5de569821a84a8553220d3f | Python | 23 bytes | Apache-2.0 | hexsha fc7553a8aea40df314ff24e55b131aeef58b917d
from .vid_seg import *
----- [record boundary: per-file quality-signal columns omitted] -----
CNS_analysis/overlapCnsWithSNPs.py | baoxingsong/CNSpublication @ 00540e13be60631e2ea6f337944101e78c45119c | Python | 15,909 bytes | MIT | hexsha fc85b898843e547b15b543abe0d5287ea080039b
#!python
import re
import subprocess
import sys
from argparse import ArgumentParser

# read a fasta file and return a dictionary; the key is the entry id and the
# value is the sequence in uppercase
from utils import readFastaFile
from utils import str2bool
class SNP:
def __init__(self, chr, v4cordinate, depth, mpileup):
self.chr = chr
self.v4cordinate = v4cordinate
self.depth = depth
self.mpileup = mpileup
    def __str__(self):
        return (self.chr + "\t" + str(self.v4cordinate) + "\t" + str(self.depth) + "\t" + str(self.mpileup))
if __name__ == '__main__':
parser = ArgumentParser(description='count number of based overlap between CNS bam output and the eQTL result,'
'please input the vcf file and the eqtl for one chromosome only')
parser.add_argument("-g", "--genome",
dest="genome",
type=str,
default="",
help="the masked reference genome file")
parser.add_argument("-b", "--bam",
dest="bam",
type=str,
default="",
help="the output of and-CNS pipeline in bam format")
parser.add_argument("-c", "--chr",
dest="chr",
type=str,
default="",
help="the chromosome to be analysised")
parser.add_argument("-s", "--mask",
dest="mask",
type=str2bool,
default=True,
help="only count the non-masking region SNP and genome length")
parser.add_argument("-v", "--vcf",
dest="vcf",
type=str,
default="",
help="the B73 v4 variant file in vcf format")
parser.add_argument("-m", "--bim",
dest="bim",
type=str,
default="",
help="the B73 v4 variant file in plink bim format")
args = parser.parse_args()
if args.genome == "":
print("Error: please specify --genome", file=sys.stderr)
parser.print_help()
sys.exit(1)
if args.bam == "":
print("Error: please specify --bam", file=sys.stderr)
parser.print_help()
sys.exit(1)
if args.vcf == "" and args.bim == "":
print("Error: please specify --vcf or --bim", file=sys.stderr)
parser.print_help()
sys.exit(1)
if args.chr == "":
print("Error: please specify --chr", file=sys.stderr)
parser.print_help()
sys.exit(1)
reference_genome = readFastaFile(args.genome)
print("reference genome reading done", file=sys.stderr)
totalDepth = 0
totalMpileup = 0
chr = args.chr
snps = dict()
# print("SNP reading done", file=sys.stderr)
seq = reference_genome[chr]
seq = re.sub("\\s", "", seq)
seq = re.sub("-", "", seq)
print ("chr" + chr)
genomelength = len(reference_genome[chr])
if args.mask:
seq = seq.replace("n", "")
seq = seq.replace("N", "")
seq = seq.replace("b", "")
seq = seq.replace("B", "")
# read the VCF file begin
if args.vcf != "" :
with open(args.vcf) as f:
for line in f:
                    if line[0] != '#':
                        elements = line.split('\t')
                        base = reference_genome[chr][int(elements[1]) - 1]
                        if chr == elements[0] and base not in ('n', 'b', 'N'):
                            s = SNP(elements[0], int(elements[1]), 0, 0)
                            snps[elements[1]] = s
        else:
            with open(args.bim) as f:
                for line in f:
                    if line[0] != '#':
                        elements = line.split('\t')
                        # bim format: the bp position is column 4 (elements[3]),
                        # so the genome lookup uses elements[3] as well
                        base = reference_genome[chr][int(elements[3]) - 1]
                        if chr == elements[0] and base not in ('n', 'b', 'N'):
                            s = SNP(elements[0], int(elements[3]), 0, 0)
                            snps[elements[3]] = s
# print("vcf file reading done", file=sys.stderr)
for line2 in subprocess.run(['samtools', 'depth', '-r', chr, args.bam], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf8').stdout.split("\n"):
            if len(line2) > 0:
                elements2 = line2.split('\t')
                position = elements2[1]
                base = reference_genome[chr][int(position) - 1]
                if int(elements2[2]) > 0 and base not in ('n', 'N', 'b', 'B'):
                    totalDepth = totalDepth + 1
                    if position in snps:
                        snps[position].depth = int(elements2[2])
# print("samtools depth done", file=sys.stderr)
for line2 in subprocess.run(['samtools', 'mpileup', '-r', chr, args.bam], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf8').stdout.split("\n"):
            if len(line2) > 0:
                elements2 = line2.split('\t')
                position = elements2[1]
                base = reference_genome[chr][int(position) - 1]
                if int(elements2[3]) > 0 and base not in ('n', 'N', 'b', 'B'):
                    totalMpileup = totalMpileup + 1
                    if position in snps:
                        snps[position].mpileup = int(elements2[3])
else:
if args.vcf != "":
with open(args.vcf) as f:
for line in f:
                    if line[0] != '#':
elements = line.split('\t')
if chr == elements[0]:
s = SNP(elements[0], int(elements[1]), 0, 0)
snps[elements[1]] = s
else:
with open(args.bim) as f:
for line in f:
                    if line[0] != '#':
                        elements = line.split('\t')
                        if chr == elements[0]:
s = SNP(elements[0], int(elements[3]), 0, 0)
snps[elements[3]] = s
# print("vcf file reading done", file=sys.stderr)
for line2 in subprocess.run(['samtools', 'depth', '-r', chr, args.bam], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf8').stdout.split("\n"):
if len(line2) > 0:
elements2 = line2.split('\t')
position = elements2[1]
if int(elements2[2]) > 0:
totalDepth = totalDepth + 1
if position in snps:
snps[position].depth = int(elements2[2])
# print("samtools depth done", file=sys.stderr)
for line2 in subprocess.run(['samtools', 'mpileup', '-r', chr, args.bam], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf8').stdout.split("\n"):
if len(line2) > 0:
elements2 = line2.split('\t')
position = elements2[1]
if int(elements2[3]) > 0:
totalMpileup = totalMpileup + 1
if position in snps:
snps[position].mpileup = int(elements2[3])
number_snp_depth = 0
number_snp_mpileup = 0
for position in snps:
if snps[position].depth > 0:
number_snp_depth = number_snp_depth + 1
if snps[position].mpileup > 0:
number_snp_mpileup = number_snp_mpileup + 1
print ("number_SNPs\t" + str(len(snps)))
print ("totalDepth\t" + str(totalDepth))
print ("totalMpileup\t" + str(totalMpileup))
print ("totalChrLength\t" + str(len(seq)))
print ("number_snp_depth\t" + str(number_snp_depth))
print ("number_snp_mpileup\t" + str(number_snp_mpileup))
#
# if __name__ == '__main__':
# parser = ArgumentParser(description='count number of based overlap between CNS bam output and the eQTL result,'
# 'please input the vcf file and the eqtl for one chromosome only')
# parser.add_argument("-g", "--genome",
# dest="genome",
# type=str,
# default="",
# help="the masked reference genome file")
#
# parser.add_argument("-b", "--bam",
# dest="bam",
# type=str,
# default="",
# help="the output of and-CNS pipeline in bam format")
#
# parser.add_argument("-m", "--hapmap",
# dest="hapmap",
# type=str,
# default="",
# help="hapmap file")
#
# parser.add_argument("-s", "--mask",
# dest="mask",
# type=str2bool,
# default=True,
# help="only count the non-masking region SNP and genome length")
#
# parser.add_argument("-v", "--vcf",
# dest="vcf",
# type=str,
# default="",
# help="the B73 v4 variant file in vcf format")
# args = parser.parse_args()
#
#
# if args.genome == "":
# print("Error: please specify --genome", file=sys.stderr)
# parser.print_help()
# sys.exit(1)
#
# if args.bam == "":
# print("Error: please specify --bam", file=sys.stderr)
# parser.print_help()
# sys.exit(1)
#
# if args.hapmap == "":
# print("Error: please specify --hapmap", file=sys.stderr)
# parser.print_help()
# sys.exit(1)
#
# if args.vcf == "":
# print("Error: please specify --vcf", file=sys.stderr)
# parser.print_help()
# sys.exit(1)
#
# reference_genome = readFastaFile(args.genome)
# print("reference genome reading done", file=sys.stderr)
#
# totalDepth = 0
# totalMpileup = 0
#
# chr = ""
# snps_dict = dict()
# sig_v4cordinate_dict = dict()
#
# # read the SNP hapmap begin
# with open(args.hapmap) as f:
# for line in f:
# if line[0] is 'S':
# elements = line[:100].split('\t')
# chr = elements[2]
# s = SNP(elements[2], int(elements[3]), int(elements[3]), 0, 0)
# snps_dict[elements[0]] = s
# # read the SNP hapmap end
# # print("SNP reading done", file=sys.stderr)
# seq = reference_genome[chr]
# seq = re.sub("\\s", "", seq)
# seq = re.sub("-", "", seq)
# print ("chr" + chr)
# if args.mask:
# # read the VCF file begin
# with open(args.vcf) as f:
# for line in f:
# if line[0] is not '#':
# elements = line.split('\t')
# if elements[0] == chr:
# elements2 = elements[2].split('-')
# variant_id = "S" + elements2[0] + "_" + elements2[1]
# if (variant_id in snps_dict) and (reference_genome[chr][int(elements[1])-1] is not 'n'):
# sig_v4cordinate_dict[elements[1]] = snps_dict[variant_id]
# sig_v4cordinate_dict[elements[1]].v4cordinate = int(elements[1])
# # print("vcf file reading done", file=sys.stderr)
#
# for line2 in subprocess.run(['samtools', 'depth', '-r', chr, args.bam], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf8').stdout.split("\n"):
# if len(line2) > 0:
# elements2 = line2.split('\t')
# position = elements2[1]
# if int(elements2[2]) > 0 and (reference_genome[chr][int(elements2[1])-1] is not 'n'):
# totalDepth = totalDepth + 1
# if position in sig_v4cordinate_dict:
# sig_v4cordinate_dict[position].depth = int(elements2[2])
# # print("samtools depth done", file=sys.stderr)
#
# for line2 in subprocess.run(['samtools', 'mpileup', '-r', chr, args.bam], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf8').stdout.split("\n"):
# if len(line2) > 0:
# elements2 = line2.split('\t')
# position = elements2[1]
# if int(elements2[3]) > 0 and (reference_genome[chr][int(elements2[1])-1] is not 'n'):
# totalMpileup = totalMpileup + 1
# if position in sig_v4cordinate_dict:
# sig_v4cordinate_dict[position].mpileup = int(elements2[3])
# seq = re.sub("n", "", seq)
# else:
# with open(args.vcf) as f:
# for line in f:
# if line[0] is not '#':
# elements = line.split('\t')
# if elements[0] == chr:
# elements2 = elements[2].split('-')
# variant_id = "S" + elements2[0] + "_" + elements2[1]
# if (variant_id in snps_dict):
# sig_v4cordinate_dict[elements[1]] = snps_dict[variant_id]
# sig_v4cordinate_dict[elements[1]].v4cordinate = int(elements[1])
# # print("vcf file reading done", file=sys.stderr)
#
# for line2 in subprocess.run(['samtools', 'depth', '-r', chr, args.bam], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf8').stdout.split("\n"):
# if len(line2) > 0:
# elements2 = line2.split('\t')
# position = elements2[1]
# if int(elements2[2]) > 0:
# totalDepth = totalDepth + 1
# if position in sig_v4cordinate_dict:
# sig_v4cordinate_dict[position].depth = int(elements2[2])
# # print("samtools depth done", file=sys.stderr)
#
# for line2 in subprocess.run(['samtools', 'mpileup', '-r', chr, args.bam], stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf8').stdout.split("\n"):
# if len(line2) > 0:
# elements2 = line2.split('\t')
# position = elements2[1]
# if int(elements2[3]) > 0:
# totalMpileup = totalMpileup + 1
# if position in sig_v4cordinate_dict:
# sig_v4cordinate_dict[position].mpileup = int(elements2[3])
#
# number_eqtl_depth = 0
# number_eqtl_mpileup = 0
# for position in sig_v4cordinate_dict:
# if sig_v4cordinate_dict[position].depth > 0:
# number_eqtl_depth = number_eqtl_depth + 1
# if sig_v4cordinate_dict[position].mpileup > 0:
# number_eqtl_mpileup = number_eqtl_mpileup + 1
# # print(sig_v4cordinate_dict[position])
#
#
# print ("total_number_of_SNPs_loci\t" + str(len(snps_dict)))
# missing_number = len(snps_dict) - len(sig_v4cordinate_dict)
# print ("number_of_missing_SNPs_in_V4_genotype\t" + str(missing_number))
# print ("number_of_non-missing_SNPs_in_V4_genotype\t" + str(len(sig_v4cordinate_dict)))
# print ("totalDepth\t" + str(totalDepth))
# print ("totalMpileup\t" + str(totalMpileup))
# print ("totalChrLength\t" + str(len(seq)))
# print ("number_snp_depth\t" + str(number_eqtl_depth))
# print ("number_snp_mpileup\t" + str(number_eqtl_mpileup))
----- [record boundary: per-file quality-signal columns omitted] -----
PgCognition/Schema/Grants.py | mathewmoon/pgcog @ a07bcb6ddd03dbb3665958341dcca7cef039eef1 | Python | 1,345 bytes | Apache-2.0 | 1 star (2021-05-15) | hexsha fc8f8582a7a4f7ce96b1e0b353852f829f46e961
GRANTS = """
SELECT cognition.createrole('tenant_admins', NULL, NULL);
GRANT USAGE ON SCHEMA cognition TO tenant_admins;
GRANT SELECT, INSERT, DELETE, UPDATE ON TABLE cognition.users TO tenant_admins;
GRANT SELECT, UPDATE (displayname) ON TABLE cognition.tenants to tenant_admins;
SELECT cognition.createrole('tenant_users', NULL, NULL);
GRANT USAGE ON SCHEMA cognition TO tenant_users;
GRANT UPDATE (
first_name,
last_name,
user_preferences
) ON TABLE cognition.users TO tenant_users;
GRANT SELECT ON TABLE cognition.users to tenant_users;
GRANT SELECT ON TABLE cognition.tenants to tenant_users;
GRANT EXECUTE ON FUNCTION cognition.gettenants TO GROUP tenant_admins;
GRANT EXECUTE ON FUNCTION cognition.gettenants TO GROUP tenant_users;
GRANT EXECUTE ON FUNCTION cognition.gettenants TO GROUP application_owner;
GRANT EXECUTE ON FUNCTION cognition.groupsof TO GROUP tenant_admins;
GRANT EXECUTE ON FUNCTION cognition.groupsof TO GROUP tenant_users;
GRANT EXECUTE ON FUNCTION cognition.groupsof TO GROUP application_owner;
GRANT EXECUTE ON FUNCTION cognition.tenantrole TO GROUP tenant_admins;
GRANT EXECUTE ON FUNCTION cognition.tenantrole TO GROUP tenant_users;
GRANT EXECUTE ON FUNCTION cognition.tenantrole TO GROUP application_owner;
"""
----- [record boundary: per-file quality-signal columns omitted] -----
lumen/tests/sources/test_intake_sql.py | holoviz/monitor @ db04d037c17101b9e126973a21e77f940f6cf83c | Python | 7,804 bytes | BSD-3-Clause | 1 star (2020-09-25), 3 issues (2020-09-24 to 2020-10-01) | hexsha fc993220492259c2b840912c0a4d0859e8b547fb
import datetime as dt
import os
import pandas as pd
from lumen.sources.intake_sql import IntakeSQLSource
from lumen.transforms.sql import SQLGroupBy
def test_intake_sql_get():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
pd.testing.assert_frame_equal(source.get('test_sql'), df)
def test_intake_sql_get_schema():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
expected_sql = {
'A': {'inclusiveMaximum': 4.0, 'inclusiveMinimum': 0.0, 'type': 'number'},
'B': {'inclusiveMaximum': 1.0, 'inclusiveMinimum': 0.0, 'type': 'number'},
'C': {'enum': ['foo1', 'foo2', 'foo3', 'foo4', 'foo5'], 'type': 'string'},
'D': {
'format': 'datetime',
'inclusiveMaximum': '2009-01-07 00:00:00',
'inclusiveMinimum': '2009-01-01 00:00:00',
'type': 'string'
}
}
expected_csv = dict(expected_sql, D={
'format': 'datetime',
'inclusiveMaximum': '2009-01-07T00:00:00',
'inclusiveMinimum': '2009-01-01T00:00:00',
'type': 'string'
})
assert source.get_schema('test_sql') == expected_sql
assert 'test' not in source._schema_cache
assert 'test_sql' in source._schema_cache
assert source.get_schema('test') == expected_csv
assert 'test' in source._schema_cache
assert 'test_sql' in source._schema_cache
def test_intake_sql_get_schema_with_none():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
expected_sql = {
'A': {'inclusiveMaximum': 4.0, 'inclusiveMinimum': 0.0, 'type': 'number'},
'B': {'inclusiveMaximum': 1.0, 'inclusiveMinimum': 0.0, 'type': 'number'},
'C': {'enum': ['foo1', None, 'foo3', 'foo5'], 'type': 'string'},
'D': {
'format': 'datetime',
'inclusiveMaximum': '2009-01-07 00:00:00',
'inclusiveMinimum': '2009-01-01 00:00:00',
'type': 'string'
}
}
assert source.get_schema('test_sql_with_none') == expected_sql
assert 'test' not in source._schema_cache
assert 'test_sql_with_none' in source._schema_cache
def test_intake_sql_transforms():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
transforms = [SQLGroupBy(by=['B'], aggregates={'SUM': 'A'})]
transformed = source.get('test_sql', sql_transforms=transforms)
expected = df.groupby('B')['A'].sum().reset_index()
pd.testing.assert_frame_equal(transformed, expected)
source.clear_cache()
def test_intake_sql_filter_int():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
filtered = source.get('test_sql', A=1)
expected = df[df.A==1].reset_index(drop=True)
pd.testing.assert_frame_equal(filtered, expected)
source.clear_cache()
def test_intake_sql_filter_None():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
filtered = source.get('test_sql_with_none', C=None)
expected = df[(df.A==1) | (df.A==3)].reset_index(drop=True)
expected['C'] = None
pd.testing.assert_frame_equal(filtered, expected)
source.clear_cache()
def test_intake_sql_filter_str():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
filtered = source.get('test_sql', C='foo2')
expected = df[df.C=='foo2'].reset_index(drop=True)
pd.testing.assert_frame_equal(filtered, expected)
def test_intake_sql_filter_int_range():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
filtered = source.get('test_sql', A=(1, 3))
expected = df[(df.A>=1) & (df.A<=3)].reset_index(drop=True)
pd.testing.assert_frame_equal(filtered, expected)
def test_intake_sql_filter_date():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
filtered = source.get('test_sql', D=dt.date(2009, 1, 2))
expected = df.iloc[1:2].reset_index(drop=True)
pd.testing.assert_frame_equal(filtered, expected)
def test_intake_sql_filter_datetime():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
filtered = source.get('test_sql', D=dt.datetime(2009, 1, 2))
expected = df.iloc[1:2].reset_index(drop=True)
pd.testing.assert_frame_equal(filtered, expected)
def test_intake_sql_filter_datetime_range():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
filtered = source.get('test_sql', D=(dt.datetime(2009, 1, 2), dt.datetime(2009, 1, 5)))
expected = df.iloc[1:3].reset_index(drop=True)
pd.testing.assert_frame_equal(filtered, expected)
def test_intake_sql_filter_date_range():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
filtered = source.get('test_sql', D=(dt.date(2009, 1, 2), dt.date(2009, 1, 5)))
expected = df.iloc[1:3].reset_index(drop=True)
pd.testing.assert_frame_equal(filtered, expected)
def test_intake_sql_filter_int_range_list():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
filtered = source.get('test_sql', A=[(0, 1), (3, 4)])
expected = df[((df.A>=0) & (df.A<=1)) | ((df.A>=3) & (df.A<=4))].reset_index(drop=True)
pd.testing.assert_frame_equal(filtered, expected)
def test_intake_sql_filter_list():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
filtered = source.get('test_sql', C=['foo1', 'foo3'])
expected = df[df.C.isin(['foo1', 'foo3'])].reset_index(drop=True)
pd.testing.assert_frame_equal(filtered, expected)
def test_intake_sql_filter_list_with_None():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
filtered = source.get('test_sql_with_none', C=[None, 'foo5'])
expected = df[df.A.isin([1, 3, 4])].reset_index(drop=True)
expected['C'] = [None, None, 'foo5']
pd.testing.assert_frame_equal(filtered, expected)
source.clear_cache()
def test_intake_sql_transforms_cache():
root = os.path.dirname(__file__)
source = IntakeSQLSource(
uri=os.path.join(root, 'catalog.yml'), root=root
)
df = pd._testing.makeMixedDataFrame()
transforms = [SQLGroupBy(by=['B'], aggregates={'SUM': 'A'})]
source.get('test_sql', sql_transforms=transforms)
expected = df.groupby('B')['A'].sum().reset_index()
cache_key = ('test_sql', 'sql_transforms', tuple(transforms))
assert cache_key in source._cache
pd.testing.assert_frame_equal(source._cache[cache_key], expected)
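# For reference, the SQLGroupBy(by=['B'], aggregates={'SUM': 'A'}) transform
# used above rewrites the table's query roughly as (a sketch of the intent,
# not necessarily the exact SQL lumen emits):
#
#     SELECT B, SUM(A) AS A FROM (<original query>) GROUP BY B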
----- [record boundary: per-file quality-signal columns omitted] -----
src/IceRayPy/utility/light/sun.py | dmilos/IceRay @ 4e01f141363c0d126d3c700c1f5f892967e3d520 | Python | 2,190 bytes | MIT-0 | 2 stars (2020-09-04 to 2022-01-17), 1 fork (2020-09-04) | hexsha fc9cc9174dbb7e4b0ff4a213151c6b43bd8c10aa
import IceRayPy
Coord3D = IceRayPy.type.math.coord.Scalar3D
class Point:
def __init__( self, P_dll, P_center = None ):
self.m_implementation = IceRayPy.core.light.SunG( P_dll, IceRayPy.core.light.Point( P_dll, IceRayPy.core.light.Spot( Coord3D( 0, 0, 10 ) ) ) )
#self.m_implementation = IceRayPy.core.light.SunS( P_dll )
self.m_cargo = self.m_implementation.m_cargo
if( None != P_center ):
self.m_cargo.center( P_center )
def __del__( self ):
pass # Do nothing
class Area:
def __init__( self, P_dll, P_origin = None ):
self.m_implementation = IceRayPy.core.light.SunG( P_dll, IceRayPy.core.light.Area( P_dll ) )
self.m_cargo = self.m_implementation.m_cargo
if( None != P_origin ):
self.m_implementation.origin( P_origin )
def __del__( self ):
pass # Do nothing
class Line:
def __init__( self, P_dll, P_start = None , P_end = None ):
self.m_implementation = IceRayPy.core.light.SunG( P_dll, IceRayPy.core.light.Line( P_dll ) )
self.m_cargo = self.m_implementation.m_cargo
if( None != P_start ):
self.m_implementation.start( P_start )
if( None != P_end ):
self.m_implementation.end( P_end )
def __del__( self ):
pass # Do nothing
class Circle:
def __init__( self, P_dll, P_center = None ):
self.m_implementation = IceRayPy.core.light.SunG( P_dll, IceRayPy.core.light.Circle( P_dll ) )
self.m_cargo = self.m_implementation.m_cargo
if( None != P_center ):
self.m_implementation.center( P_center )
def __del__( self ):
pass # Do nothing
class Disc:
    def __init__( self, P_dll, P_center = None ):
        self.m_implementation = IceRayPy.core.light.SunG( P_dll, IceRayPy.core.light.Disc( P_dll ) )
        self.m_cargo = self.m_implementation.m_cargo
        if( None != P_center ):
            self.m_implementation.center( P_center )
    def __del__( self ):
        pass # Do nothing
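# Hypothetical usage sketch (how the dll handle is obtained is an assumption;
# the wrappers above only need a loaded IceRay dll plus optional geometry):
#
#     sun = Point( P_dll, P_center = Coord3D( 0, 0, 100 ) )
#     # sun.m_cargo is the underlying core light object to hand to the scene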
----- [record boundary: per-file quality-signal columns omitted] -----
custom_loss.py | Mirorrn/Spline-Lane-Detection @ 7535e3a1c0c347dafbb9d0efb7da390f0dc5e482 | Python | 31,127 bytes | MIT | 1 star (2021-06-16) | hexsha fca6e7cc832a0acf6b9ee3b2025690ae9817b3ae
import tensorflow as tf
import numpy as np
import config as cfg
from keras import backend as K
def huber(true, pred, delta):
loss = tf.where(tf.abs(true-pred) < delta , 0.5*((true-pred)**2), delta*tf.abs(true - pred) - 0.5*(delta**2))
# loss = tf.Print(loss, [loss], message="This is loss: ", summarize=1000)
return loss
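# Quick numeric check of the Huber form above: quadratic below delta, linear
# above it (values computed by hand from the formula):
#
#     |true - pred| = 0.1, delta = 0.5  ->  0.5 * 0.1**2           = 0.005
#     |true - pred| = 2.0, delta = 0.5  ->  0.5 * 2.0 - 0.5 * 0.25 = 0.875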
class loss:
def __init__(self, config):
self.config = config
self.norm = config.img_h // 2.
self.alpha = config.alpha
self.focal_loss = True
# self.mloss_conf = tf.Variable(0., )
# self.mloss_loc = tf.Variable(0., )
def loss_test(self, y_true, y_pred):
batch_size = tf.shape(y_true)[0]
if self.config.staged:
y_pred = tf.reshape(y_pred, [batch_size, self.config.grid_size, self.config.grid_size, self.config.num_prediction_cells, 4])
y_true = tf.reshape(y_true, [batch_size, self.config.grid_size, self.config.grid_size,
self.config.num_prediction_cells, (self.config.grid_cel_size + 1)*4 + 5 + 1])
# alpha = self.config.alpha
# y_true = tf.Print(y_true, [tf.shape(y_true)], message="This is y_true: ", summarize=1000)
conf_true = y_true[:, :, :, :, -1]
conf_true = conf_true[..., np.newaxis]
# care rotated coeff normalized
#y_pred = tf.reshape(y_pred, [])
a_pred = y_pred[:, :, :, :, 0] * self.config.a_range #+ self.config.a_shift
a_pred = a_pred[..., np.newaxis]
b_pred = y_pred[:, :, :, :, 1] * self.config.b_range #+ self.config.b_shift
b_pred = b_pred[..., np.newaxis]
c_pred = y_pred[:, :, :, :, 2] * self.config.c_range #+ self.config.c_shift
c_pred = c_pred[..., np.newaxis]
# a_true = y_true[:, :, :, :, -10] * self.config.a_range + self.config.a_shift
# a_true = a_true[..., np.newaxis]
# b_true = y_true[:, :, :, :, -9] * self.config.b_range + self.config.b_shift
# b_true = b_true[..., np.newaxis]
# c_true = y_true[:, :, :, :, -8] * self.config.c_range + self.config.c_shift
# c_true = c_true[..., np.newaxis]
conf_pred = y_pred[:, :, :, :, 3]
conf_pred = conf_pred[..., np.newaxis]
x_tr = y_true[:, :, :, :, 0:self.config.grid_cel_size + 1]
################################################# matching part for splines
# dif1_target = y_true[:, :, :, :, -23:-6]
# counted_anchors = tf.to_float(tf.count_nonzero(matching_anchor_point1[:, :, :, :,1], axis=[1,2,3])) # care first cell in left corner assumes to be empty
# matching_anchor_point2 = y_true[:, :, :, :, -5:-1]
# matching_anchor_coef1 = tf.gather_nd(y_pred, tf.cast(matching_anchor_point1, tf.int32))
# matching_anchor_coef2 = tf.gather_nd(y_pred, tf.cast(matching_anchor_point2, tf.int32))
# a_pred_for_matching1 = matching_anchor_coef1[:, :, :, :, 0] * self.config.a_range + self.config.a_shift
# a_pred_for_matching1 = a_pred_for_matching1[..., np.newaxis]
# b_pred_for_matching1 = matching_anchor_coef1[:, :, :, :, 1] * self.config.b_range + self.config.b_shift
# b_pred_for_matching1 = b_pred_for_matching1[..., np.newaxis]
#
# c_pred_for_matching1 = matching_anchor_coef1[:, :, :, :, 2] * self.config.c_range + self.config.c_shift
# c_pred_for_matching1 = c_pred_for_matching1[..., np.newaxis]
# a_pred_for_matching2 = matching_anchor_coef2[:, :, :, :, 0] * self.config.a_range + self.config.a_shift
# a_pred_for_matching2 = a_pred_for_matching2[..., np.newaxis]
# b_pred_for_matching2 = matching_anchor_coef2[:, :, :, :, 1] * self.config.b_range + self.config.b_shift
# b_pred_for_matching2 = b_pred_for_matching2[..., np.newaxis]
# c_pred_for_matching2 = matching_anchor_coef2[:, :, :, :, 2] * self.config.c_range + self.config.c_shift
# c_pred_for_matching2 = c_pred_for_matching2[..., np.newaxis]
#x_anchor_point_1 = y_true[:, :, :, :, self.config.grid_cel_size]
# x_anchor_point_1 = x_anchor_point_1[..., np.newaxis]
# x_anchor_point_2 = y_true[:, :, :, :, 0]
# x_anchor_point_2 = x_anchor_point_2[..., np.newaxis]
# y_pre1 = 2 *a_pred * x_tr + b_pred #+ c_pred_for_matching1
# y_pre1 = tf.expand_dims(matching_anchor_coef1[:, :, :, :,2*( self.config.grid_cel_size+1) -1], -1)
# y_pre2 = 2 *a_pred_for_matching2 * x_anchor_point_2 + b_pred_for_matching2 #+ c_pred_for_matching2
# y_pre1 = tf.Print(y_pre1, [y_pre1], message="This is y_pre1: ", summarize=1000)
# y_pre2 = tf.expand_dims(matching_anchor_coef2[:, :, :, :, self.config.grid_cel_size+1], -1)
# dif_cell = ( y_true[:, :, :, :, -3] - y_true[:, :, :, :, -7]) * self.config.grid_cel_size
#dif_cell = tf.Print(dif_cell, [dif_cell], message="This is dif_cell: ", summarize=1000)
#dif_cell = dif_cell[..., np.newaxis]
# y_pre2 = dif1_target #+ dif_cell
# y_pre2 = tf.Print(y_pre2, [y_pre2], message="This is y_pre2: ", summarize=1000)
# y_pre2 = y_pre2[..., np.newaxis]
# y_pre3 = 2 * a_pred_for_matching1 * x_anchor_point_1 + b_pred_for_matching1# * x_anchor_point_1 #+ c_pred_for_matching1
# y_pre4 = 2 * a_pred_for_matching2 * x_anchor_point_2 + b_pred_for_matching2# * x_anchor_point_2 #+ c_pred_for_matching2
# dif_0 = tf.expand_dims(huber(y_pre1, y_pre2, 0.5), -1)
# dif_1 = huber(y_pre3, y_pre4, 0.5)
# dif_test = tf.Print(dif_test, [dif_test], message="This is dif_test: ", summarize=1000)
# sum_dif_0 = tf.reduce_sum(tf.multiply(conf_true, tf.reduce_mean(dif_0, axis=-1)), axis=[1, 2, 3, 4])
# sum_dif_0 = tf.Print(sum_dif_0, [sum_dif_0], message="This is sum_dif_0: ", summarize=1000)
# dif_0
# sum_dif_1 = tf.reduce_sum(tf.multiply(conf_true, dif_1), axis=[1, 2, 3, 4])
# sum_dif_0 = tf.Print(sum_dif_0, [sum_dif_0], message="This is sum_dif_0: ", summarize=1000)
#counted_anchors = tf.Print(counted_anchors, [counted_anchors], message="This is counted_anchors: ", summarize=1000)
# loss_anchor = tf.divide( test_sum, counted_anchors)
# loss_anchor = tf.Print(loss_anchor, [loss_anchor], message="This is counted_anchors: ",
# summarize=1000)
#self.loss_anchor = 0.1 * tf.reduce_mean(test_sum) # 0.5 factor?
# self.loss_anchor = tf.Print(self.loss_anchor, [self.loss_anchor], message="This is self.loss_anchor: ", summarize=1000)
################################################ end of matching part for splines!
y_tr = y_true[:, :, :, :, self.config.grid_cel_size + 1:2*(self.config.grid_cel_size + 1)]
#y_tr = tf.Print(y_tr, [y_tr], message="This is y_tr: ", summarize=1000)
#non_nans_idc = tf.where((y_tr != 1))
#non_nans_idc = non_nans_idc[..., np.newaxis]
counted_non_nan = tf.to_float(tf.count_nonzero(tf.to_float(tf.logical_not(tf.is_nan(y_tr))), axis=-1))
y_tr = tf.where(tf.is_nan(y_tr), tf.zeros_like(y_tr), y_tr)
#y_tr = tf.Print(y_tr, [tf.shape(y_tr)], message="This is y_tr: ", summarize=1000)
y_pre = a_pred * x_tr ** 2 + b_pred * x_tr + c_pred
weights = y_true[:, :, :, :, 2*(self.config.grid_cel_size + 1): 3*(self.config.grid_cel_size + 1)]
# rotate prediction x = -y and y = x
#conf_true = tf.Print(conf_true, [conf_true], message="This is conf: ", summarize=1000)
#tf.boolean_mask(x, tf.logical_not(tf.is_inf(x))))
# loss_loc = tf.multiply(conf_true, tf.expand_dims(tf.divide( tf.reduce_sum(huber(y_tr,y_pre, .5)* weights, axis=-1), counted_non_nan), -1))
loss_loc = tf.multiply(conf_true, tf.expand_dims(tf.reduce_mean(huber(y_tr, y_pre, 0.5)* weights, axis=-1), -1))
#loss_loc = tf.Print(loss_loc, [loss_loc], message="This is loss_loc: ", summarize=1000)
#loss_loc = tf.Print(loss_loc, [loss_loc], message="This is loss: ", summarize=1000)
        numb_of_trues = tf.count_nonzero(conf_true, axis=[1, 2, 3, 4])
        # tf.equal is needed here: Python's `==` on a tensor does not build an
        # elementwise comparison op, so the tf.where condition would be wrong
        numb_of_trues = tf.where(tf.equal(numb_of_trues, 0), tf.ones_like(numb_of_trues), numb_of_trues)
        numb_of_trues = tf.to_float(numb_of_trues)
sum_loss_loc = tf.reduce_sum(loss_loc, axis=[1,2,3,4] )
#numb_of_trues = tf.Print(numb_of_trues, [numb_of_trues], message="This is numb_of_trues: ", summarize=1000)
#loss_loc = tf.div_no_nan(sum_loss_loc, numb_of_trues) # wrong, but gives a clue of weighting loc and conf
self.mloss_loc = (1. - self.config.alpha)*(tf.reduce_mean(sum_loss_loc)) # 0.5 factor?
# self.mloss_loc = tf.reduce_mean(sum_loss_loc) # 0.5 factor?
# loss_loc = tf.Print(loss_loc, [loss_loc], message="This is loss: ", summarize=1000)
# CONF LOSS
# conf_true_reshaped = tf.reshape(conf_true, [batch_size * (self.config.grid_size ** 2) * self.config.num_prediction_cells, 1])
# conf_pred = tf.reshape(conf_pred, [batch_size * (self.config.grid_size ** 2) * self.config.num_prediction_cells, 1])
# conf_pred = tf.Print(conf_pred, [conf_pred], message="conf_pred: ", summarize=1000)
# loss_conf = tf.nn.sigmoid_cross_entropy_with_logits(labels=conf_true_reshaped, logits=conf_pred)#,# weights=conf_true_reshaped * 1.5,
# # reduction=tf.losses.Reduction.NONE, label_smoothing=0.01)
if self.focal_loss:
print('Using Focal loss!')
conf_pred = K.clip(tf.sigmoid(conf_pred), K.epsilon(), 1 - K.epsilon())
pt_1 = tf.where(tf.equal(conf_true, 1), conf_pred, tf.ones_like(conf_pred))
pt_0 = tf.where(tf.equal(conf_true, 0), conf_pred, tf.zeros_like(conf_pred))
            alpha = 0.25
            gamma = 0.01
sum_loss_conf = -tf.reduce_sum(alpha * K.pow(1. - pt_1, gamma) * K.log(pt_1),axis=[1,2,3,4]) - tf.reduce_sum((1 - alpha) * K.pow(pt_0, gamma) * K.log(1. - pt_0),axis=[1,2,3,4])
#sum_loss_conf = tf.Print(sum_loss_conf, [sum_loss_conf], message="sum_loss_conf: ", summarize=1000)
#loss_conf = tf.div_no_nan(sum_loss_conf, numb_of_trues)
self.mloss_conf = self.config.alpha * tf.reduce_mean(sum_loss_conf) # 0.5 factor?
else:
            print('Using cross entropy!')
#conf_pred = K.clip(tf.sigmoid(conf_pred), K.epsilon(), 1 - K.epsilon())
# pt_1 = tf.where(tf.equal(conf_true, 1), conf_pred, tf.ones_like(conf_pred))
# pt_0 = tf.where(tf.equal(conf_true, 0), conf_pred, tf.zeros_like(conf_pred))
# pt_1 = tf.Print(pt_1, [pt_1], message="This is pt_1: ", summarize=1000)
#sum_loss_conf = -tf.reduce_sum(K.log(pt_1), axis=[1, 2, 3, 4]) - tf.reduce_sum(K.log(1. - pt_0), axis=[1, 2, 3, 4])
conf_true_reshaped = tf.reshape(conf_true, [batch_size * (self.config.grid_size ** 2) * self.config.num_prediction_cells, 1])
conf_pred = tf.reshape(conf_pred,[batch_size * (self.config.grid_size ** 2) * self.config.num_prediction_cells, 1])
loss_conf = tf.losses.sigmoid_cross_entropy(multi_class_labels=conf_true_reshaped, logits=conf_pred,# weights=conf_true_reshaped * 1.5,
reduction=tf.losses.Reduction.NONE,)# label_smoothing=0)
loss_conf = tf.reshape(loss_conf, [batch_size, self.config.grid_size, self.config.grid_size,
self.config.num_prediction_cells, 1])
sum_loss_conf = tf.reduce_sum(loss_conf, axis=[1, 2, 3, 4])
# loss_conf = tf.div_no_nan(sum_loss_conf, numb_of_trues)
self.mloss_conf = self.config.alpha * tf.reduce_mean(sum_loss_conf) # 0.5 factor?
# conf_pred = tf.reshape(conf_pred, [batch_size * (self.config.grid_size ** 2) * self.config.num_prediction_cells, 1])
# loss_conf = (1. + tf.abs(conf_true_reshaped-tf.sigmoid(conf_pred))**self.config.cls_reg) * loss_conf # https://arxiv.org/pdf/1708.02002.pdf
# loss_conf = tf.Print(loss_conf, [loss_conf], message="loss_conf2: ", summarize=1000)
#loss_conf = tf.expand_dims(tf.square(conf_true_reshaped -conf_pred ), -1 )
# loss_conf = tf.reshape(loss_conf, [batch_size, self.config.grid_size, self.config.grid_size, self.config.num_prediction_cells, 1])
self.loss_sum = self.mloss_conf + self.mloss_loc
return self.loss_sum # + self.loss_anchor
#return tf.maximum(self.mloss_conf,self.mloss_loc) #+ self.loss_anchor
def loss(self, y_true, y_pred):
batch_size = tf.shape(y_true)[0]
if self.config.staged:
y_pred = tf.reshape(y_pred, [batch_size, self.config.grid_size, self.config.grid_size,
self.config.num_prediction_cells, 4])
y_true = tf.reshape(y_true, [batch_size, self.config.grid_size, self.config.grid_size,
self.config.num_prediction_cells, (self.config.grid_cel_size + 1) * 4 + 5 + 1])
conf_true = y_true[:, :, :, :, -1]
conf_true = conf_true[..., np.newaxis]
# care rotated coeff normalized
a_pred = y_pred[:, :, :, :, 0] * self.config.a_range
a_pred = a_pred[..., np.newaxis]
b_pred = y_pred[:, :, :, :, 1] * self.config.b_range
b_pred = b_pred[..., np.newaxis]
c_pred = y_pred[:, :, :, :, 2] * self.config.c_range
c_pred = c_pred[..., np.newaxis]
conf_pred = y_pred[:, :, :, :, 3]
conf_pred = conf_pred[..., np.newaxis]
# numb_of_trues = tf.count_nonzero(conf_true, axis=[1, 2, 3, 4])
# numb_of_trues = tf.where(numb_of_trues == 0, tf.ones_like(numb_of_trues), numb_of_trues)
# numb_of_trues = tf.to_float(numb_of_trues)
x_tr = y_true[:, :, :, :, 0:self.config.grid_cel_size + 1]
y_tr = y_true[:, :, :, :, self.config.grid_cel_size + 1:2 * (self.config.grid_cel_size + 1)]
#y_tr = tf.where(tf.is_nan(y_tr), tf.zeros_like(y_tr), y_tr)
y_pre = a_pred * x_tr ** 2 + b_pred * x_tr + c_pred
weights = y_true[:, :, :, :, 2 * (self.config.grid_cel_size + 1): 3 * (self.config.grid_cel_size + 1)]
loss_loc = tf.multiply(conf_true, tf.expand_dims(tf.reduce_mean(huber(y_tr, y_pre, 0.5) * weights, axis=-1), -1))
sum_loss_loc = tf.reduce_sum(loss_loc, axis=[1, 2, 3, 4])
# sum_loss_loc = tf.div_no_nan(sum_loss_loc, numb_of_trues)
self.mloss_loc = (1. - self.config.alpha) * (tf.reduce_mean(sum_loss_loc)) # 0.5 factor?
if self.focal_loss:
print('Using Focal loss!')
conf_pred = K.clip(tf.sigmoid(conf_pred), K.epsilon(), 1 - K.epsilon())
pt_1 = tf.where(tf.equal(conf_true, 1), conf_pred, tf.ones_like(conf_pred))
pt_0 = tf.where(tf.equal(conf_true, 0), conf_pred, tf.zeros_like(conf_pred))
# sum_loss_conf = -tf.reduce_sum(self.config.alpha_focal * K.pow(1. - pt_1, self.config.gamma) * K.log(pt_1),
# axis=[1, 2, 3, 4]) - tf.reduce_sum(
# (1 - self.config.alpha_focal) * K.pow(pt_0, self.config.gamma) * K.log(1. - pt_0), axis=[1, 2, 3, 4])
self.mloss_conf_TRUE = tf.reduce_mean(-tf.reduce_sum(self.config.alpha_focal * K.pow(1. - pt_1, self.config.gamma) * K.log(pt_1),
axis=[1, 2, 3, 4]))
self.mloss_conf_FALSE = tf.reduce_mean( - tf.reduce_sum(
(1 - self.config.alpha_focal) * K.pow(pt_0, self.config.gamma) * K.log(1. - pt_0), axis=[1, 2, 3, 4]))
sum_loss_conf = self.mloss_conf_TRUE + self.mloss_conf_FALSE
# sum_loss_conf = tf.Print(sum_loss_conf, [sum_loss_conf], message="sum_loss_conf: ", summarize=1000)
# loss_conf = tf.div_no_nan(sum_loss_conf, numb_of_trues)
self.mloss_conf = self.config.focal_loss_param * self.config.alpha * sum_loss_conf
# self.mloss_conf =self.config.focal_loss_param * self.config.alpha * tf.reduce_mean(sum_loss_conf) # 0.5 factor?
else:
            print('Using cross entropy!')
# conf_true_reshaped = tf.reshape(conf_true, [batch_size * (self.config.grid_size ** 2) * self.config.num_prediction_cells, 1])
# conf_pred = tf.reshape(conf_pred,[batch_size * (self.config.grid_size ** 2) * self.config.num_prediction_cells, 1])
# loss_conf = tf.expand_dims(tf.nn.sigmoid_cross_entropy_with_logits(labels=conf_true_reshaped, logits=conf_pred), -1)
# loss_conf = tf.reshape(loss_conf, [batch_size, self.config.grid_size, self.config.grid_size,
# self.config.num_prediction_cells, 1])
# sum_loss_conf = tf.reduce_sum(loss_conf, axis=[1, 2, 3, 4])
conf_pred = K.clip(tf.sigmoid(conf_pred), K.epsilon(), 1 - K.epsilon())
pt_1 = tf.where(tf.equal(conf_true, 1), conf_pred, tf.ones_like(conf_pred))
pt_0 = tf.where(tf.equal(conf_true, 0), conf_pred, tf.zeros_like(conf_pred))
sum_loss_conf = -tf.reduce_sum(K.log(pt_1),
axis=[1, 2, 3, 4]) - tf.reduce_sum(K.log(1. - pt_0), axis=[1, 2, 3, 4])
# sum_loss_conf = tf.div_no_nan(sum_loss_conf, numb_of_trues)
self.mloss_conf = self.config.alpha * tf.reduce_mean(sum_loss_conf) # 0.5 factor?
self.loss_sum = self.mloss_conf + self.mloss_loc
return self.loss_sum # + self.loss_anchor
# return tf.maximum(self.mloss_conf,self.mloss_loc) #+ self.loss_anchor
def loss_KONF(self, y_true, y_pred):
batch_size = tf.shape(y_true)[0]
if self.config.staged:
y_pred = tf.reshape(y_pred, [batch_size, self.config.grid_size, self.config.grid_size,
self.config.num_prediction_cells, 1])
y_true = tf.reshape(y_true, [batch_size, self.config.grid_size, self.config.grid_size,
self.config.num_prediction_cells, (self.config.grid_cel_size + 1) * 4 + 5 + 1])
conf_pred = y_pred[:, :, :, :, 0]
conf_pred = conf_pred[..., np.newaxis]
conf_true = y_true[:, :, :, :, -1]
conf_true = conf_true[..., np.newaxis]
if self.focal_loss:
print('Using Focal loss!')
conf_pred = K.clip(tf.sigmoid(conf_pred), K.epsilon(), 1 - K.epsilon())
pt_1 = tf.where(tf.equal(conf_true, 1), conf_pred, tf.ones_like(conf_pred))
pt_0 = tf.where(tf.equal(conf_true, 0), conf_pred, tf.zeros_like(conf_pred))
# sum_loss_conf = -tf.reduce_sum(self.config.alpha_focal * K.pow(1. - pt_1, self.config.gamma) * K.log(pt_1),
# axis=[1, 2, 3, 4]) - tf.reduce_sum(
# (1 - self.config.alpha_focal) * K.pow(pt_0, self.config.gamma) * K.log(1. - pt_0), axis=[1, 2, 3, 4])
self.mloss_conf_TRUE = tf.reduce_mean(-tf.reduce_sum(self.config.alpha_focal * K.pow(1. - pt_1, self.config.gamma) * K.log(pt_1),
axis=[1, 2, 3, 4]))
            self.mloss_conf_FALSE = tf.reduce_mean(-tf.reduce_sum(
(1 - self.config.alpha_focal) * K.pow(pt_0, self.config.gamma) * K.log(1. - pt_0), axis=[1, 2, 3, 4]))
sum_loss_conf = self.mloss_conf_TRUE + self.mloss_conf_FALSE
# sum_loss_conf = tf.Print(sum_loss_conf, [sum_loss_conf], message="sum_loss_conf: ", summarize=1000)
# loss_conf = tf.div_no_nan(sum_loss_conf, numb_of_trues)
self.mloss_conf = self.config.focal_loss_param * self.config.alpha * sum_loss_conf
# self.mloss_conf =self.config.focal_loss_param * self.config.alpha * tf.reduce_mean(sum_loss_conf) # 0.5 factor?
else:
            print('Using cross entropy!')
# conf_true_reshaped = tf.reshape(conf_true, [batch_size * (self.config.grid_size ** 2) * self.config.num_prediction_cells, 1])
# conf_pred = tf.reshape(conf_pred,[batch_size * (self.config.grid_size ** 2) * self.config.num_prediction_cells, 1])
# loss_conf = tf.expand_dims(tf.nn.sigmoid_cross_entropy_with_logits(labels=conf_true_reshaped, logits=conf_pred), -1)
# loss_conf = tf.reshape(loss_conf, [batch_size, self.config.grid_size, self.config.grid_size,
# self.config.num_prediction_cells, 1])
# sum_loss_conf = tf.reduce_sum(loss_conf, axis=[1, 2, 3, 4])
conf_pred = K.clip(tf.sigmoid(conf_pred), K.epsilon(), 1 - K.epsilon())
pt_1 = tf.where(tf.equal(conf_true, 1), conf_pred, tf.ones_like(conf_pred))
pt_0 = tf.where(tf.equal(conf_true, 0), conf_pred, tf.zeros_like(conf_pred))
sum_loss_conf = -tf.reduce_sum(K.log(pt_1),
axis=[1, 2, 3, 4]) - tf.reduce_sum(K.log(1. - pt_0), axis=[1, 2, 3, 4])
# sum_loss_conf = tf.div_no_nan(sum_loss_conf, numb_of_trues)
self.mloss_conf = self.config.alpha * tf.reduce_mean(sum_loss_conf) # 0.5 factor?
return self.mloss_conf
def loss_LOK(self, y_true, y_pred):
batch_size = tf.shape(y_true)[0]
if self.config.staged:
y_pred = tf.reshape(y_pred, [batch_size, self.config.grid_size, self.config.grid_size,
self.config.num_prediction_cells, 3])
y_true = tf.reshape(y_true, [batch_size, self.config.grid_size, self.config.grid_size,
self.config.num_prediction_cells, (self.config.grid_cel_size + 1) * 4 + 5 + 1])
conf_true = y_true[:, :, :, :, -1]
conf_true = conf_true[..., np.newaxis]
        # note: the rotated polynomial coefficients are normalized; scale them back by the config ranges
a_pred = y_pred[:, :, :, :, 0] * self.config.a_range
a_pred = a_pred[..., np.newaxis]
b_pred = y_pred[:, :, :, :, 1] * self.config.b_range
b_pred = b_pred[..., np.newaxis]
c_pred = y_pred[:, :, :, :, 2] * self.config.c_range
c_pred = c_pred[..., np.newaxis]
x_tr = y_true[:, :, :, :, 0:self.config.grid_cel_size + 1]
y_tr = y_true[:, :, :, :, self.config.grid_cel_size + 1:2 * (self.config.grid_cel_size + 1)]
# y_tr = tf.where(tf.is_nan(y_tr), tf.zeros_like(y_tr), y_tr)
y_pre = a_pred * x_tr ** 2 + b_pred * x_tr + c_pred
weights = y_true[:, :, :, :, 2 * (self.config.grid_cel_size + 1): 3 * (self.config.grid_cel_size + 1)]
loss_loc = tf.multiply(conf_true,
tf.expand_dims(tf.reduce_mean(huber(y_tr, y_pre, 0.5) * weights, axis=-1), -1))
sum_loss_loc = tf.reduce_sum(loss_loc, axis=[1, 2, 3, 4])
# sum_loss_loc = tf.div_no_nan(sum_loss_loc, numb_of_trues)
self.mloss_loc = (1. - self.config.alpha) * (tf.reduce_mean(sum_loss_loc)) # 0.5 factor?
return self.mloss_loc
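    # --- Added reference sketch (hypothetical, not part of the original class):
    # `huber` is assumed to be a helper defined earlier in this file with
    # signature huber(y_true, y_pred, delta). The standard Huber loss it
    # presumably computes is, per element, with e = y_true - y_pred:
    #   0.5 * e^2                      if |e| <= delta
    #   delta * (|e| - 0.5 * delta)    otherwise
    @staticmethod
    def _huber_reference_sketch(y_true, y_pred, delta=0.5):
        # Minimal, self-contained version for comparison only.
        error = tf.abs(y_true - y_pred)
        quadratic = 0.5 * tf.square(error)
        linear = delta * (error - 0.5 * delta)
        return tf.where(error <= delta, quadratic, linear)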
def loss_lane(self, y_true, y_pred):
conf_true = y_true[:, :, -1]
conf_true = conf_true[..., np.newaxis]
        # note: the rotated polynomial coefficients are normalized; scale them back by the config ranges and shifts
#y_pred = tf.reshape(y_pred, [])
a_pred = y_pred[:, :, 0] * self.config.a_range + self.config.a_shift
a_pred = a_pred[..., np.newaxis]
b_pred = y_pred[:, :, 1] * self.config.b_range + self.config.b_shift
b_pred = b_pred[..., np.newaxis]
c_pred = y_pred[:, :, 2] * self.config.c_range + self.config.c_shift
c_pred = c_pred[..., np.newaxis]
# ytrue y points
y_points = y_true[:, :, 3:6]
#y_points = y_points[..., np.newaxis]
# ytrue x points
x_points = y_true[:, :, 0:3]
#x_points = x_points[..., np.newaxis]
conf_pred = y_pred[:, :, -1]
conf_pred = conf_pred[..., np.newaxis]
y_pre = (a_pred * x_points ** 2 + b_pred * x_points + c_pred)
#y_pre = tf.Print(y_pre, [y_pre], message="This is conf: ", summarize=1000)
        loss_loc = tf.multiply(conf_true, tf.expand_dims(tf.reduce_sum(huber(y_points, y_pre, 0.5), axis=-1), -1))
        sum_loss_loc = tf.reduce_sum(loss_loc, axis=[1, 2])
self.mloss_loc = tf.reduce_mean(sum_loss_loc) # 0.5 factor?
# x_min = y_true[:, :, 6]
# x_min = x_min[..., np.newaxis]
# x_max = y_true[:, :, 11]
# x_max = x_max[..., np.newaxis]
# x_minmax = tf.concat([x_min,x_max] , axis=-1)
x_minmax = x_points
#x_minmax = tf.Print(x_minmax, [x_minmax], message="This is x_minmax: ", summarize=1000)
loss_coord = tf.multiply(conf_true, tf.expand_dims(tf.reduce_sum(huber(x_minmax, y_pred[:, :, 3:6], 0.5), axis=-1), -1))
# loss_coord = tf.Print(loss_coord, [loss_coord], message="This is conf: ", summarize=1000)
loss_coord = tf.reduce_sum(loss_coord, axis=[1, 2])
# sum_loss_coord = tf.Print(sum_loss_coord, [sum_loss_coord], message="This is sum_loss_coord: ", summarize=1000)
self.mloss_coord = tf.reduce_mean(loss_coord) # 0.5 factor?
# CONF LOSS
batch_size = tf.shape(y_true)[0]
conf_true_reshaped = tf.reshape(conf_true, [batch_size * self.config.num_prediction_cells, 1])
conf_pred_reshaped = tf.reshape(conf_pred, [batch_size * self.config.num_prediction_cells, 1])
        loss_conf = tf.expand_dims(tf.losses.sigmoid_cross_entropy(multi_class_labels=conf_true_reshaped, logits=conf_pred_reshaped,  # weights=conf_true_reshaped * 1.5,
reduction=tf.losses.Reduction.NONE, label_smoothing=0), -1)
loss_conf = tf.reshape(loss_conf, [batch_size, self.config.num_prediction_cells, 1])
# loss_conf = tf.Print(loss_conf, [loss_conf], message="This is loss_conf: ", summarize=1000)
sum_loss_conf = tf.reduce_sum(loss_conf, axis=[1, 2])
self.mloss_conf = tf.reduce_mean(sum_loss_conf) # 0.5 factor?
return self.mloss_conf + self.mloss_coord + self.mloss_loc
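    # --- Added note (illustrative): loss_lane regresses each lane as a
    # parabola y(x) = a*x^2 + b*x + c evaluated at the ground-truth x
    # positions. The [..., np.newaxis] expansions give a/b/c shape
    # (batch, cells, 1) so they broadcast against x_points of shape
    # (batch, cells, 3); conf_true then masks the location and coordinate
    # terms to cells that actually contain a lane, while the confidence term
    # is a plain sigmoid cross entropy over all cells.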
def loss_nb(self, y_true, y_pred):
batch_size = tf.shape(y_true)[0]
#################################################################################################
c_tr = y_true
c_pre = y_pred
# c_pre = tf.Print(c_pre, [tf.shape(c_pre)], message="This is c_pre: ", summarize=1000)
# error = tf.reduce_sum(tf.square(y_true-y_pred), axis=-1)
# c_pre = tf.Print(c_pre, [c_pre], message="This is c_pre: ", summarize=1000)
c_tr_flatten = tf.reshape(c_tr, [batch_size * (self.config.grid_size ** 2), 1])
c_pre_flatten = tf.reshape(c_pre, [batch_size * (self.config.grid_size ** 2), 1])
# c_pre_flatten = tf.Print(c_pre_flatten, [c_pre_flatten], message="This is c_pre_flatten: ", summarize=1000)
loss_c = tf.expand_dims(
tf.losses.sigmoid_cross_entropy(multi_class_labels=c_tr_flatten, logits=c_pre_flatten,
reduction=tf.losses.Reduction.NONE, label_smoothing=0), -1)
loss_c = tf.reshape(loss_c, [batch_size, self.config.grid_size, self.config.grid_size, 1])
# loss_c = tf.Print(loss_c, [loss_c], message="This is loss_c: ", summarize=1000)
sum_loss_c = tf.reduce_sum(loss_c, axis=[1, 2])
# sum_loss_c = tf.Print(sum_loss_c, [sum_loss_c], message="This is sum_loss_c: ", summarize=1000)
self.mloss_segmentation = tf.reduce_mean(sum_loss_c)
return self.mloss_segmentation
def loss_lane_points(self, y_true, y_pred):
#alpha = self.config.alpha
conf_true = y_true[:, :, -1]
conf_true = conf_true[..., np.newaxis]
# ytrue y points
y_points = y_true[:, :, 3:6]
#y_points = y_points[..., np.newaxis]
        # ytrue x and y points (this slice spans all six coordinate values)
x_points = y_true[:, :, 0:6]
#x_points = x_points[..., np.newaxis]
conf_pred = y_pred[:, :, -1]
conf_pred = conf_pred[..., np.newaxis]
#y_pre = (a_pred * x_points ** 2 + b_pred * x_points + c_pred)
#y_pre = tf.Print(y_pre, [y_pre], message="This is conf: ", summarize=1000)
#loss_loc = tf.multiply(conf_true, tf.expand_dims(tf.reduce_sum(huber(y_points,y_pre, 0.5), axis=-1), -1))
#sum_loss_loc = tf.reduce_sum(loss_loc, axis=[1, 2] )
#self.mloss_loc = tf.reduce_mean(sum_loss_loc) # 0.5 factor?
# x_min = y_true[:, :, 6]
# x_min = x_min[..., np.newaxis]
# x_max = y_true[:, :, 11]
# x_max = x_max[..., np.newaxis]
# x_minmax = tf.concat([x_min,x_max] , axis=-1)
x_minmax = x_points
#x_minmax = tf.Print(x_minmax, [x_minmax], message="This is x_minmax: ", summarize=1000)
loss_coord = tf.multiply(conf_true, tf.expand_dims(tf.reduce_sum(huber(x_minmax, y_pred[:, :, 0:6], 0.5), axis=-1), -1))
# loss_coord = tf.Print(loss_coord, [loss_coord], message="This is conf: ", summarize=1000)
loss_coord = tf.reduce_sum(loss_coord, axis=[1, 2])
# sum_loss_coord = tf.Print(sum_loss_coord, [sum_loss_coord], message="This is sum_loss_coord: ", summarize=1000)
self.mloss_coord = tf.reduce_mean(loss_coord) # 0.5 factor?
# CONF LOSS
batch_size = tf.shape(y_true)[0]
conf_true_reshaped = tf.reshape(conf_true, [batch_size * self.config.num_prediction_cells, 1])
conf_pred_reshaped = tf.reshape(conf_pred, [batch_size * self.config.num_prediction_cells, 1])
        loss_conf = tf.expand_dims(tf.losses.sigmoid_cross_entropy(multi_class_labels=conf_true_reshaped, logits=conf_pred_reshaped,  # weights=conf_true_reshaped * 1.5,
reduction=tf.losses.Reduction.NONE, label_smoothing=0), -1)
loss_conf = tf.reshape(loss_conf, [batch_size, self.config.num_prediction_cells, 1])
# loss_conf = tf.Print(loss_conf, [loss_conf], message="This is loss_conf: ", summarize=1000)
sum_loss_conf = tf.reduce_sum(loss_conf, axis=[1, 2])
self.mloss_conf = tf.reduce_mean(sum_loss_conf) # 0.5 factor?
return self.mloss_conf + self.mloss_coord #+ self.mloss_loc
    def loss_sum(self, y_true, y_pred):
        # Caution: the main loss assigns self.loss_sum = ..., and that instance
        # attribute shadows this method afterwards, so obj.loss_sum then yields
        # the cached tensor rather than this callable.
        return self.loss_sum
    def lossTRUE(self, y_true, y_pred):
        return self.mloss_conf_TRUE
    def lossFALSE(self, y_true, y_pred):
        return self.mloss_conf_FALSE
    def confidence_loss(self, y_true, y_pred):
        return self.mloss_conf
    def loc_loss(self, y_true, y_pred):
        return self.mloss_loc
    def loss_coord(self, y_true, y_pred):
        return self.mloss_coord
    def loss_segmentation(self, y_true, y_pred):
        return self.mloss_segmentation
# sum_loss_loc = tf.reduce_sum(loss_loc, axis=[1,2,3] )
# loss_loc = tf.truediv(sum_loss_loc, numb_of_trues) # wrong, but gives a clue of weighting loc and conf
# loss_loc_without_nans_mask = tf.logical_not(tf.is_nan(loss_loc))#filter nan's!
# loss_loc_without_nans_mask = tf.Print(loss_loc_without_nans_mask, [loss_loc_without_nans_mask], message="This is loss: ", summarize=1000)
# loss_loc = tf.boolean_mask(loss_loc, loss_loc_without_nans_mask)
# self.mloss_loc = 0.5 * tf.reduce_mean(loss_loc) # 0.5 factor?
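# --- Added usage sketch (illustrative): one plausible way to wire the staged
# losses above into a two-output tf.keras model. `model` and `loss_obj` are
# assumptions for illustration; the enclosing loss class and the actual model
# are defined earlier in this file.
def compile_staged_model(model, loss_obj):
    # loss_KONF trains the confidence head and loss_LOK the localization head;
    # confidence_loss / loc_loss merely re-expose the per-part tensors cached
    # by the last loss evaluation, which makes them usable as monitoring
    # metrics alongside the main losses but not as standalone objectives.
    model.compile(optimizer='adam',
                  loss=[loss_obj.loss_KONF, loss_obj.loss_LOK],
                  metrics=[loss_obj.confidence_loss, loss_obj.loc_loss])
    return model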
| 56.185921
| 188
| 0.615768
| 4,809
| 31,127
| 3.66022
| 0.043668
| 0.092603
| 0.047722
| 0.037837
| 0.854221
| 0.816669
| 0.781161
| 0.761334
| 0.7573
| 0.729576
| 0
| 0.031489
| 0.232788
| 31,127
| 554
| 189
| 56.185921
| 0.705582
| 0.43435
| 0
| 0.698745
| 0
| 0
| 0.006634
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066946
| false
| 0
| 0.016736
| 0.029289
| 0.150628
| 0.025105
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5d7f95820466f18515766e62e41780268e866fb3
| 185
|
py
|
Python
|
reproject/setup_package.py
|
pllim/reproject
|
35415be015a28ba097526649b1b02b85c4893e90
|
[
"BSD-3-Clause"
] | 29
|
2015-02-24T17:55:31.000Z
|
2018-11-15T23:20:30.000Z
|
reproject/setup_package.py
|
pllim/reproject
|
35415be015a28ba097526649b1b02b85c4893e90
|
[
"BSD-3-Clause"
] | 93
|
2015-02-27T08:26:38.000Z
|
2018-12-12T08:30:18.000Z
|
reproject/setup_package.py
|
pllim/reproject
|
35415be015a28ba097526649b1b02b85c4893e90
|
[
"BSD-3-Clause"
] | 22
|
2015-04-13T16:56:32.000Z
|
2018-08-09T17:08:10.000Z
|
def get_package_data():
return {
_ASTROPY_PACKAGE_NAME_ + '.tests': ['coveragerc', 'data/*'],
_ASTROPY_PACKAGE_NAME_ + '.interpolation.tests': ['reference/*']
}
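# Added note (illustrative): astropy-helpers executes this hook at build time
# and appears to supply _ASTROPY_PACKAGE_NAME_ in the module namespace, which
# is why nothing here imports it. Assuming the package is named "reproject",
# a rough plain-setuptools equivalent of the returned mapping would be:
PACKAGE_DATA_EQUIVALENT = {
    'reproject.tests': ['coveragerc', 'data/*'],
    'reproject.interpolation.tests': ['reference/*'],
}
# i.e. setup(..., package_data=PACKAGE_DATA_EQUIVALENT) in a plain setup.py.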
| 30.833333
| 72
| 0.621622
| 17
| 185
| 6.176471
| 0.647059
| 0.266667
| 0.342857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.205405
| 185
| 5
| 73
| 37
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0.286486
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0
| 0.2
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
5d8c83807cb911c45577dfa33e1923d33dcf6921
| 7,301
|
py
|
Python
|
tests/integration/test_sort.py
|
ckmganesh/dask-sql
|
5a056cc5e3e80463fb3d16dc45f1feffbf278b65
|
[
"MIT"
] | 1
|
2021-02-18T06:47:56.000Z
|
2021-02-18T06:47:56.000Z
|
tests/integration/test_sort.py
|
ckmganesh/dask-sql
|
5a056cc5e3e80463fb3d16dc45f1feffbf278b65
|
[
"MIT"
] | null | null | null |
tests/integration/test_sort.py
|
ckmganesh/dask-sql
|
5a056cc5e3e80463fb3d16dc45f1feffbf278b65
|
[
"MIT"
] | null | null | null |
from dask_sql.context import Context
import pytest
from pandas.testing import assert_frame_equal, assert_series_equal
import pandas as pd
import dask.dataframe as dd
def test_sort(c, user_table_1, df):
df_result = c.sql(
"""
SELECT
*
FROM user_table_1
ORDER BY b, user_id DESC
"""
)
df_result = df_result.compute().reset_index(drop=True)
df_expected = user_table_1.sort_values(
["b", "user_id"], ascending=[True, False]
).reset_index(drop=True)
assert_frame_equal(df_result, df_expected)
df_result = c.sql(
"""
SELECT
*
FROM df
ORDER BY b DESC, a DESC
"""
)
df_result = df_result.compute()
df_expected = df.sort_values(["b", "a"], ascending=[False, False])
assert_frame_equal(
df_result.reset_index(drop=True), df_expected.reset_index(drop=True)
)
df_result = c.sql(
"""
SELECT
*
FROM df
ORDER BY a DESC, b
"""
)
df_result = df_result.compute()
df_expected = df.sort_values(["a", "b"], ascending=[False, True])
assert_frame_equal(
df_result.reset_index(drop=True), df_expected.reset_index(drop=True)
)
df_result = c.sql(
"""
SELECT
*
FROM df
ORDER BY b, a
"""
)
df_result = df_result.compute()
df_expected = df.sort_values(["b", "a"], ascending=[True, True])
assert_frame_equal(
df_result.reset_index(drop=True), df_expected.reset_index(drop=True)
)
def test_sort_by_alias(c, user_table_1):
df_result = c.sql(
"""
SELECT
b AS my_column
FROM user_table_1
ORDER BY my_column, user_id DESC
"""
)
df_result = (
df_result.compute().reset_index(drop=True).rename(columns={"my_column": "b"})
)
df_expected = user_table_1.sort_values(
["b", "user_id"], ascending=[True, False]
).reset_index(drop=True)[["b"]]
assert_frame_equal(df_result, df_expected)
def test_sort_with_nan():
c = Context()
df = pd.DataFrame(
{"a": [1, 2, float("nan"), 2], "b": [4, float("nan"), 5, float("inf")]}
)
c.create_table("df", df)
df_result = c.sql("SELECT * FROM df ORDER BY a").compute().reset_index(drop=True)
assert_frame_equal(
df_result,
pd.DataFrame(
{"a": [1, 2, 2, float("nan")], "b": [4, float("nan"), float("inf"), 5]}
),
)
df_result = (
c.sql("SELECT * FROM df ORDER BY a NULLS FIRST")
.compute()
.reset_index(drop=True)
)
assert_frame_equal(
df_result,
pd.DataFrame(
{"a": [float("nan"), 1, 2, 2], "b": [5, 4, float("nan"), float("inf")]}
),
)
df_result = (
c.sql("SELECT * FROM df ORDER BY a NULLS LAST").compute().reset_index(drop=True)
)
assert_frame_equal(
df_result,
pd.DataFrame(
{"a": [1, 2, 2, float("nan")], "b": [4, float("nan"), float("inf"), 5]}
),
)
df_result = (
c.sql("SELECT * FROM df ORDER BY a ASC").compute().reset_index(drop=True)
)
assert_frame_equal(
df_result,
pd.DataFrame(
{"a": [1, 2, 2, float("nan")], "b": [4, float("nan"), float("inf"), 5]}
),
)
df_result = (
c.sql("SELECT * FROM df ORDER BY a ASC NULLS FIRST")
.compute()
.reset_index(drop=True)
)
assert_frame_equal(
df_result,
pd.DataFrame(
{"a": [float("nan"), 1, 2, 2], "b": [5, 4, float("nan"), float("inf")]}
),
)
df_result = (
c.sql("SELECT * FROM df ORDER BY a ASC NULLS LAST")
.compute()
.reset_index(drop=True)
)
assert_frame_equal(
df_result,
pd.DataFrame(
{"a": [1, 2, 2, float("nan")], "b": [4, float("nan"), float("inf"), 5]}
),
)
df_result = (
c.sql("SELECT * FROM df ORDER BY a DESC").compute().reset_index(drop=True)
)
assert_frame_equal(
df_result,
pd.DataFrame(
{"a": [float("nan"), 2, 2, 1], "b": [5, float("inf"), float("nan"), 4]}
),
)
df_result = (
c.sql("SELECT * FROM df ORDER BY a DESC NULLS FIRST")
.compute()
.reset_index(drop=True)
)
assert_frame_equal(
df_result,
pd.DataFrame(
{"a": [float("nan"), 2, 2, 1], "b": [5, float("inf"), float("nan"), 4]}
),
)
df_result = (
c.sql("SELECT * FROM df ORDER BY a DESC NULLS LAST")
.compute()
.reset_index(drop=True)
)
assert_frame_equal(
df_result,
pd.DataFrame(
{"a": [2, 2, 1, float("nan")], "b": [float("inf"), float("nan"), 4, 5]}
),
)
def test_sort_with_nan_more_columns():
c = Context()
df = pd.DataFrame(
{
"a": [1, 1, 2, 2, float("nan"), float("nan")],
"b": [1, 1, 2, float("nan"), float("inf"), 5],
"c": [1, float("nan"), 3, 4, 5, 6],
}
)
c.create_table("df", df)
df_result = (
c.sql(
"SELECT * FROM df ORDER BY a ASC NULLS FIRST, b DESC NULLS LAST, c ASC NULLS FIRST"
)
.c.compute()
.reset_index(drop=True)
)
assert_series_equal(
df_result, pd.Series([5, 6, float("nan"), 1, 3, 4]), check_names=False
)
df_result = (
c.sql(
"SELECT * FROM df ORDER BY a ASC NULLS LAST, b DESC NULLS FIRST, c DESC NULLS LAST"
)
.c.compute()
.reset_index(drop=True)
)
assert_series_equal(
df_result, pd.Series([1, float("nan"), 4, 3, 5, 6]), check_names=False
)
def test_sort_strings(c):
string_table = pd.DataFrame({"a": ["zzhsd", "öfjdf", "baba"]})
c.create_table("string_table", string_table)
df_result = c.sql(
"""
SELECT
*
FROM string_table
ORDER BY a
"""
)
df_result = df_result.compute().reset_index(drop=True)
df_expected = string_table.sort_values(["a"], ascending=True).reset_index(drop=True)
assert_frame_equal(df_result, df_expected)
def test_sort_not_allowed(c):
# Wrong column
with pytest.raises(Exception):
c.sql("SELECT * FROM user_table_1 ORDER BY 42")
def test_limit(c, long_table):
df_result = c.sql("SELECT * FROM long_table LIMIT 101")
df_result = df_result.compute()
assert_frame_equal(df_result, long_table.iloc[:101])
df_result = c.sql("SELECT * FROM long_table LIMIT 200")
df_result = df_result.compute()
assert_frame_equal(df_result, long_table.iloc[:200])
df_result = c.sql("SELECT * FROM long_table LIMIT 100")
df_result = df_result.compute()
assert_frame_equal(df_result, long_table.iloc[:100])
df_result = c.sql("SELECT * FROM long_table LIMIT 100 OFFSET 99")
df_result = df_result.compute()
assert_frame_equal(df_result, long_table.iloc[99 : 99 + 100])
df_result = c.sql("SELECT * FROM long_table LIMIT 100 OFFSET 100")
df_result = df_result.compute()
assert_frame_equal(df_result, long_table.iloc[100 : 100 + 100])
df_result = c.sql("SELECT * FROM long_table LIMIT 101 OFFSET 101")
df_result = df_result.compute()
assert_frame_equal(df_result, long_table.iloc[101 : 101 + 101])
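# --- Added usage sketch (illustrative): a minimal standalone run of the
# dask-sql API exercised above (Context.create_table / Context.sql). The data
# here is made up for demonstration.
def demo_sort_and_limit():
    import pandas as pd
    from dask_sql.context import Context

    c = Context()
    df = pd.DataFrame({"a": [3, 1, 2], "b": ["x", "y", "z"]})
    c.create_table("df", df)
    # c.sql returns a lazy dask DataFrame; .compute() materializes it.
    return c.sql("SELECT * FROM df ORDER BY a DESC LIMIT 2").compute()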
| 25.617544
| 95
| 0.563895
| 1,013
| 7,301
| 3.84304
| 0.080948
| 0.143848
| 0.061649
| 0.070896
| 0.82841
| 0.799897
| 0.792962
| 0.770357
| 0.770357
| 0.745184
| 0
| 0.02713
| 0.283112
| 7,301
| 284
| 96
| 25.707746
| 0.71666
| 0.001644
| 0
| 0.547739
| 0
| 0
| 0.142606
| 0
| 0
| 0
| 0
| 0
| 0.120603
| 1
| 0.035176
| false
| 0
| 0.025126
| 0
| 0.060302
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5dbd77146643a2151b1e520363ca2ce60b096fb1
| 6,959
|
py
|
Python
|
tests/test_jobs/test_serializers.py
|
gzcf/polyaxon
|
77ac8838c6444a36541e6c28aba7ae42de392fee
|
[
"MIT"
] | null | null | null |
tests/test_jobs/test_serializers.py
|
gzcf/polyaxon
|
77ac8838c6444a36541e6c28aba7ae42de392fee
|
[
"MIT"
] | null | null | null |
tests/test_jobs/test_serializers.py
|
gzcf/polyaxon
|
77ac8838c6444a36541e6c28aba7ae42de392fee
|
[
"MIT"
] | null | null | null |
from unittest.mock import patch
import pytest
from api.jobs.serializers import JobDetailSerializer, JobSerializer, JobStatusSerializer
from constants.jobs import JobLifeCycle
from db.models.jobs import Job, JobStatus
from factories.factory_jobs import JobFactory, JobStatusFactory
from tests.utils import BaseTest
@pytest.mark.jobs_mark
class TestJobSerializer(BaseTest):
DISABLE_RUNNER = True
serializer_class = JobSerializer
model_class = Job
factory_class = JobFactory
expected_keys = {
'id',
'uuid',
'name',
'user',
'unique_name',
'description',
'created_at',
'updated_at',
'last_status',
'started_at',
'finished_at',
'tags',
'project',
'build_job',
}
def setUp(self):
super().setUp()
self.obj1 = self.factory_class()
self.obj2 = self.factory_class()
def test_serialize_one(self):
data = self.serializer_class(self.obj1).data
assert set(data.keys()) == self.expected_keys
assert data.pop('uuid') == self.obj1.uuid.hex
assert data.pop('user') == self.obj1.user.username
assert data.pop('project') == self.obj1.project.unique_name
assert data.pop('build_job') == (
self.obj1.build_job.unique_name if self.obj1.build_job else None)
assert data.pop('last_status') == self.obj1.last_status
data.pop('created_at')
data.pop('updated_at')
data.pop('started_at', None)
data.pop('finished_at', None)
for k, v in data.items():
assert getattr(self.obj1, k) == v
def test_serialize_one_with_status(self):
obj1 = self.factory_class()
data = self.serializer_class(obj1).data
assert set(data.keys()) == self.expected_keys
assert data['started_at'] is None
assert data['finished_at'] is None
JobStatus.objects.create(job=obj1, status=JobLifeCycle.SCHEDULED)
data = self.serializer_class(obj1).data
assert set(data.keys()) == self.expected_keys
assert data['started_at'] is not None
assert data['finished_at'] is None
JobStatus.objects.create(job=obj1, status=JobLifeCycle.SUCCEEDED)
data = self.serializer_class(obj1).data
assert set(data.keys()) == self.expected_keys
assert data['started_at'] is not None
assert data['finished_at'] is not None
def test_serialize_many(self):
data = self.serializer_class(self.model_class.objects.all(), many=True).data
assert len(data) == 2
for d in data:
assert set(d.keys()) == self.expected_keys
@pytest.mark.jobs_mark
class TestJobDetailSerializer(BaseTest):
DISABLE_RUNNER = True
serializer_class = JobDetailSerializer
model_class = Job
factory_class = JobFactory
expected_keys = {
'id',
'uuid',
'name',
'unique_name',
'created_at',
'updated_at',
'project',
'build_job',
'user',
'last_status',
'description',
'config',
'tags',
'started_at',
'finished_at',
'is_clone',
'build_job',
'original',
'resources',
'node_scheduled',
'bookmarked'
}
def setUp(self):
super().setUp()
self.obj1 = self.factory_class()
self.obj2 = self.factory_class()
def test_serialize_one(self):
data = self.serializer_class(self.obj1).data
assert set(data.keys()) == self.expected_keys
assert data.pop('uuid') == self.obj1.uuid.hex
assert data.pop('user') == self.obj1.user.username
assert data.pop('project') == self.obj1.project.unique_name
assert data.pop('build_job') == (self.obj1.build_job.unique_name if
self.obj1.build_job else None)
assert data.pop('original') == (self.obj1.original_job.unique_name if
self.obj1.original_job else None)
assert data.pop('last_status') == self.obj1.last_status
assert data.pop('bookmarked') is False
data.pop('created_at')
data.pop('updated_at')
data.pop('started_at', None)
data.pop('finished_at', None)
for k, v in data.items():
assert getattr(self.obj1, k) == v
def test_serialize_one_with_status(self):
obj1 = self.factory_class()
data = self.serializer_class(obj1).data
assert set(data.keys()) == self.expected_keys
assert data['started_at'] is None
assert data['finished_at'] is None
JobStatus.objects.create(job=obj1, status=JobLifeCycle.SCHEDULED)
data = self.serializer_class(obj1).data
assert set(data.keys()) == self.expected_keys
assert data['started_at'] is not None
assert data['finished_at'] is None
JobStatus.objects.create(job=obj1, status=JobLifeCycle.SUCCEEDED)
data = self.serializer_class(obj1).data
assert set(data.keys()) == self.expected_keys
assert data['started_at'] is not None
assert data['finished_at'] is not None
def test_cloned(self):
obj1 = self.factory_class()
data = self.serializer_class(obj1).data
assert set(data.keys()) == self.expected_keys
assert data['is_clone'] is False
obj2 = self.factory_class()
obj1.original_job = obj2
obj1.save()
data = self.serializer_class(obj1).data
assert set(data.keys()) == self.expected_keys
assert data['is_clone'] is True
def test_serialize_many(self):
data = self.serializer_class(self.model_class.objects.all(), many=True).data
assert len(data) == 2
for d in data:
assert set(d.keys()) == self.expected_keys
@pytest.mark.jobs_mark
class TestJobStatusSerializer(BaseTest):
DISABLE_RUNNER = True
serializer_class = JobStatusSerializer
model_class = JobStatus
factory_class = JobStatusFactory
expected_keys = {'id', 'uuid', 'job', 'created_at', 'status', 'message', 'details'}
def setUp(self):
super().setUp()
with patch.object(Job, 'set_status') as _: # noqa
self.obj1 = self.factory_class()
self.obj2 = self.factory_class()
def test_serialize_one(self):
data = self.serializer_class(self.obj1).data
assert set(data.keys()) == self.expected_keys
assert data.pop('uuid') == self.obj1.uuid.hex
assert data.pop('job') == self.obj1.job.id
data.pop('created_at')
for k, v in data.items():
assert getattr(self.obj1, k) == v
def test_serialize_many(self):
data = self.serializer_class(self.model_class.objects.all(), many=True).data
assert len(data) == 2
for d in data:
assert set(d.keys()) == self.expected_keys
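# --- Added sketch (illustrative): the pattern these tests exercise, reduced
# to plain Django REST framework. serializer_class(instance).data yields a
# dict whose keys the tests compare against expected_keys; the serializer
# below is a made-up stand-in, not polyaxon's.
from rest_framework import serializers

class JobLikeSerializer(serializers.Serializer):
    id = serializers.IntegerField()
    name = serializers.CharField()
    last_status = serializers.CharField(allow_null=True)

def serialized_keys(instance):
    # Mirrors `set(self.serializer_class(self.obj1).data.keys())` above.
    return set(JobLikeSerializer(instance).data.keys())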
| 32.069124
| 88
| 0.617761
| 853
| 6,959
| 4.873388
| 0.12075
| 0.053885
| 0.060621
| 0.07746
| 0.78013
| 0.769305
| 0.734905
| 0.734905
| 0.734905
| 0.734905
| 0
| 0.009766
| 0.264262
| 6,959
| 216
| 89
| 32.217593
| 0.802148
| 0.000575
| 0
| 0.779661
| 0
| 0
| 0.094635
| 0
| 0
| 0
| 0
| 0
| 0.271186
| 1
| 0.067797
| false
| 0
| 0.039548
| 0
| 0.20904
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f8fe8e3cac45dece6c65701cd98d52641b402ca9
| 215
|
py
|
Python
|
XKT/utils/__init__.py
|
bigdata-ustc/XKT
|
b3ac07541b92001b62d7cff4e8fe7e5a69c5c93c
|
[
"MIT"
] | 17
|
2019-09-11T12:00:05.000Z
|
2022-03-30T04:41:05.000Z
|
XKT/utils/__init__.py
|
bigdata-ustc/XKT
|
b3ac07541b92001b62d7cff4e8fe7e5a69c5c93c
|
[
"MIT"
] | 1
|
2021-10-24T01:13:33.000Z
|
2021-10-24T02:03:26.000Z
|
XKT/utils/__init__.py
|
bigdata-ustc/XKT
|
b3ac07541b92001b62d7cff4e8fe7e5a69c5c93c
|
[
"MIT"
] | 6
|
2019-09-13T07:50:07.000Z
|
2022-03-12T00:22:11.000Z
|
# coding: utf-8
# create by tongshiwei on 2019-7-13
from .etl import *
from .loss import SequenceLogisticMaskLoss as SLMLoss, LogisticMaskLoss as LMLoss
from .loss import SequenceLogisticMaskLoss, LogisticMaskLoss
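# Added note (illustrative): the aliased and unaliased imports above bind the
# same class objects, so for example
#   from XKT.utils import SLMLoss, SequenceLogisticMaskLoss
#   assert SLMLoss is SequenceLogisticMaskLoss
# holds; the aliases are just shorter names for downstream code.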
| 30.714286
| 81
| 0.813953
| 27
| 215
| 6.481481
| 0.703704
| 0.091429
| 0.16
| 0.434286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042781
| 0.130233
| 215
| 6
| 82
| 35.833333
| 0.893048
| 0.218605
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d2bf13fb0bea759ba675e868d1472a4df028e0f
| 27
|
py
|
Python
|
exostriker/__init__.py
|
exoristos21/exostriker
|
85cee34744bcd6e960dcdffc9140bb1d9107982e
|
[
"MIT"
] | 69
|
2020-01-06T13:31:06.000Z
|
2022-03-29T11:23:14.000Z
|
exostriker/__init__.py
|
sai-33/Exostriker
|
f59fa51c6bdce3a2ed51d6621fe42bfcd8c2846f
|
[
"MIT"
] | 67
|
2019-11-30T14:45:05.000Z
|
2022-03-14T20:26:06.000Z
|
exostriker/__init__.py
|
sai-33/Exostriker
|
f59fa51c6bdce3a2ed51d6621fe42bfcd8c2846f
|
[
"MIT"
] | 13
|
2020-01-06T13:44:40.000Z
|
2022-03-29T11:23:17.000Z
|
from exostriker import gui
| 13.5
| 26
| 0.851852
| 4
| 27
| 5.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d3e1029d3649948176aac1cf9c38d5a370f155c
| 22
|
py
|
Python
|
hmi/__init__.py
|
hsuanhauliu/image-hidden-message
|
f7e38fb8f2922e9a1c57665d281c74cd9b6a297e
|
[
"MIT"
] | null | null | null |
hmi/__init__.py
|
hsuanhauliu/image-hidden-message
|
f7e38fb8f2922e9a1c57665d281c74cd9b6a297e
|
[
"MIT"
] | null | null | null |
hmi/__init__.py
|
hsuanhauliu/image-hidden-message
|
f7e38fb8f2922e9a1c57665d281c74cd9b6a297e
|
[
"MIT"
] | null | null | null |
from hmi.hmi import *
| 11
| 21
| 0.727273
| 4
| 22
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 22
| 1
| 22
| 22
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d4ae1296c83a059b8e03dec747794ee9e3b2b0b
| 34
|
py
|
Python
|
libs/yowsup/yowsup/yowsup/demos/sendclient/__init__.py
|
akshitpradhan/TomHack
|
837226e7b38de1140c19bc2d478eeb9e379ed1fd
|
[
"MIT"
] | 22
|
2017-07-14T20:01:17.000Z
|
2022-03-08T14:22:39.000Z
|
libs/yowsup/yowsup/yowsup/demos/sendclient/__init__.py
|
akshitpradhan/TomHack
|
837226e7b38de1140c19bc2d478eeb9e379ed1fd
|
[
"MIT"
] | 6
|
2017-07-14T21:03:50.000Z
|
2021-06-10T19:08:32.000Z
|
libs/yowsup/yowsup/yowsup/demos/sendclient/__init__.py
|
akshitpradhan/TomHack
|
837226e7b38de1140c19bc2d478eeb9e379ed1fd
|
[
"MIT"
] | 13
|
2017-07-14T20:13:14.000Z
|
2020-11-12T08:06:05.000Z
|
from .stack import YowsupSendStack
| 34
| 34
| 0.882353
| 4
| 34
| 7.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 34
| 1
| 34
| 34
| 0.967742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5d52455c8b1921c9d57097589dda983dfd3194d0
| 163
|
py
|
Python
|
boml/lower_iter/__init__.py
|
LongMa319/BOML
|
8cbb5a557e93dabd858438efd67c0685402efa9e
|
[
"MIT"
] | 2
|
2021-12-20T03:24:27.000Z
|
2022-01-10T14:16:21.000Z
|
boml/lower_iter/__init__.py
|
perseveranceLX/BOML
|
8cbb5a557e93dabd858438efd67c0685402efa9e
|
[
"MIT"
] | null | null | null |
boml/lower_iter/__init__.py
|
perseveranceLX/BOML
|
8cbb5a557e93dabd858438efd67c0685402efa9e
|
[
"MIT"
] | 1
|
2022-03-29T13:21:20.000Z
|
2022-03-29T13:21:20.000Z
|
from boml.lower_iter.inner_grad import BOMLInnerGradTrad
from boml.lower_iter.simple import BOMLInnerGradSimple
from boml.lower_iter.aggr import BOMLInnerGradAggr
| 40.75
| 56
| 0.889571
| 22
| 163
| 6.409091
| 0.545455
| 0.170213
| 0.276596
| 0.361702
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07362
| 163
| 3
| 57
| 54.333333
| 0.933775
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
5d7394fc529391b2bd147b81bc5fdf3c06ccbf7a
| 103,618
|
py
|
Python
|
tests/licensedcode/test_match.py
|
jimjag/scancode-toolkit
|
37d574b194696261dad486c6771f6e7dc4138eac
|
[
"Apache-2.0",
"CC-BY-4.0"
] | 1,511
|
2015-07-01T15:29:03.000Z
|
2022-03-30T13:40:05.000Z
|
tests/licensedcode/test_match.py
|
jimjag/scancode-toolkit
|
37d574b194696261dad486c6771f6e7dc4138eac
|
[
"Apache-2.0",
"CC-BY-4.0"
] | 2,695
|
2015-07-01T16:01:35.000Z
|
2022-03-31T19:17:44.000Z
|
tests/licensedcode/test_match.py
|
jimjag/scancode-toolkit
|
37d574b194696261dad486c6771f6e7dc4138eac
|
[
"Apache-2.0",
"CC-BY-4.0"
] | 540
|
2015-07-01T15:08:19.000Z
|
2022-03-31T12:13:11.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright (c) nexB Inc. and others. All rights reserved.
# ScanCode is a trademark of nexB Inc.
# SPDX-License-Identifier: Apache-2.0
# See http://www.apache.org/licenses/LICENSE-2.0 for the license text.
# See https://github.com/nexB/scancode-toolkit for support or download.
# See https://aboutcode.org for more information about nexB OSS projects.
#
import os
from commoncode.testcase import FileBasedTesting
from licensedcode import cache
from licensedcode import index
from licensedcode import models
from licensedcode.index import LicenseIndex
from licensedcode.match import filter_contained_matches
from licensedcode.match import filter_matches_missing_key_phrases
from licensedcode.match import filter_overlapping_matches
from licensedcode.match import get_full_matched_text
from licensedcode.match import get_matching_regions
from licensedcode.match import LicenseMatch
from licensedcode.match import merge_matches
from licensedcode.match import reportable_tokens
from licensedcode.match import restore_non_overlapping
from licensedcode.match import tokenize_matched_text
from licensedcode.match import Token
from licensedcode.models import Rule
from licensedcode.models import load_rules
from licensedcode.query import Query
from licensedcode.spans import Span
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
class TestLicenseMatchBasic(FileBasedTesting):
test_data_dir = TEST_DATA_DIR
def test_LicenseMatch_equality(self):
r1 = Rule(stored_text='r1', license_expression='apache-2.0 OR gpl')
m1_r1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
m2_r1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
assert m1_r1 == m2_r1
assert not (m1_r1 != m2_r1)
r2 = Rule(stored_text='r1', license_expression='apache-2.0 OR gpl')
m3_r2 = LicenseMatch(rule=r2, qspan=Span(0, 2), ispan=Span(0, 2))
assert r1 == r2
assert m1_r1 == m3_r2
def test_LicenseMatch_equality_2(self):
r1 = Rule(stored_text='r1', license_expression='apache-2.0 OR gpl')
m1_r1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
r2 = Rule(stored_text='r2', license_expression='gpl OR apache-2.0')
m2_r2 = LicenseMatch(rule=r2, qspan=Span(0, 2), ispan=Span(0, 2))
assert r1.licensing is r2.licensing
assert r1 != r2
assert r1.license_expression != r2.license_expression
assert r1.license_expression_object == r2.license_expression_object
assert str(r1.license_expression_object.simplify()) == str(r2.license_expression_object.simplify())
assert m1_r1 == m2_r2
assert not (m1_r1 != m2_r2)
assert r2.same_licensing(r2)
assert m1_r1.qspan == m2_r2.qspan
assert m1_r1.ispan == m2_r2.ispan
r3 = Rule(stored_text='r3', license_expression='gpl OR apache-2.0')
m3_r3 = LicenseMatch(rule=r3, qspan=Span(0, 2), ispan=Span(0, 3))
assert m2_r2 != m3_r3
r4 = Rule(stored_text='r3', license_expression='gpl1 OR apache-2.0')
m4_r4 = LicenseMatch(rule=r4, qspan=Span(0, 2), ispan=Span(0, 3))
assert m3_r3 != m4_r4
def test_LicenseMatch_not_equal(self):
r1 = Rule(text_file='r1', license_expression='apache-1.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
r2 = Rule(text_file='r2', license_expression='gpl OR apache-2.0')
m2 = LicenseMatch(rule=r2, qspan=Span(0, 2), ispan=Span(0, 2))
assert m1 != m2
r3 = Rule(text_file='r3', license_expression='apache-1.0 OR gpl')
m3 = LicenseMatch(rule=r3, qspan=Span(0, 2), ispan=Span(0, 2))
assert m1 == m3
r4 = Rule(text_file='r4', license_expression='apache-1.0 OR gpl')
m4 = LicenseMatch(rule=r4, qspan=Span(1, 2), ispan=Span(1, 2))
assert not m1 == m4
def test_LicenseMatch_equals(self):
rule = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=rule, matcher='chunk1', qspan=Span(0, 7), ispan=Span(0, 7), start_line=1, end_line=1)
m2 = LicenseMatch(rule=rule, matcher='chunk2', qspan=Span(0, 7), ispan=Span(0, 7), start_line=1, end_line=1)
assert m1 == m2
m3 = LicenseMatch(rule=rule, matcher='chunk3', qspan=Span(16, 23), ispan=Span(0, 7), start_line=3, end_line=3)
assert m1 != m3
def test_LicenseMatch_comparisons(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
contained1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
contained2 = LicenseMatch(rule=r1, qspan=Span(1, 4), ispan=Span(1, 4))
same_span1 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
same_span2 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
before_after = LicenseMatch(rule=r1, qspan=Span(8, 9), ispan=Span(8, 9))
touching = LicenseMatch(rule=r1, qspan=Span(7, 7), ispan=Span(7, 7))
overlapping = LicenseMatch(rule=r1, qspan=Span(4, 7), ispan=Span(4, 7))
assert same_span1 == same_span2
assert same_span1 in same_span2
assert same_span1.overlap(same_span2)
assert same_span2.overlap(same_span1)
assert contained1 not in same_span1
assert same_span1 not in contained1
assert contained1.overlap(same_span2)
assert contained1.surround(contained2)
assert contained2 in same_span2
assert contained2 in contained1
assert contained2.overlap(overlapping)
assert overlapping.overlap(contained2)
assert overlapping.overlap(same_span1)
assert not overlapping.overlap(before_after)
assert before_after.is_after(same_span1)
assert before_after.is_after(touching)
assert before_after.is_after(contained1)
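    # --- Added note (illustrative, not part of the original suite): the
    # comparisons above rely on licensedcode.spans.Span semantics as exercised
    # here: Span(a, b) covers positions a..b inclusive, `m1 in m2` tests
    # containment of the matched positions, .overlap() is symmetric,
    # .surround() is strict containment, and .is_after() compares whole spans.
    # For example Span(1, 4) is contained in Span(0, 5), while Span(4, 7)
    # merely overlaps it.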
def test_combine_raise_TypeError_for_matches_of_different_rules(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl2')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
m2 = LicenseMatch(rule=r2, qspan=Span(1, 6), ispan=Span(1, 6))
try:
m1.combine(m2)
except TypeError:
pass
def test_combine_matches_with_same_rules(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
m2 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
match = m1.combine(m2)
assert match.qspan == Span(0, 6)
assert match.ispan == Span(0, 6)
def test_combine_matches_cannot_combine_matches_with_same_licensing_and_different_rules(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
m2 = LicenseMatch(rule=r2, qspan=Span(1, 6), ispan=Span(1, 6))
try:
m1.combine(m2)
self.fail('Should fail')
except TypeError:
pass
def test_LicenseMatch_small(self):
r1_text = u'licensed under the GPL, licensed under the GPL distribute extent of law'
small_rule = Rule(text_file='small_rule', license_expression='apache-1.1', stored_text=r1_text)
r2_text = u'licensed under the GPL, licensed under the GPL re distribute extent of law' * 10
long_rule = Rule(text_file='long_rule', license_expression='apache-1.1', stored_text=r2_text)
_idx = index.LicenseIndex([small_rule, long_rule])
test = LicenseMatch(rule=small_rule, qspan=Span(0, 10), ispan=Span(0, 10), hispan=Span(12))
assert test.is_small()
test = LicenseMatch(rule=small_rule, qspan=Span(0, 10), ispan=Span(0, 10), hispan=Span(11, 12))
assert test.is_small()
test = LicenseMatch(rule=small_rule, qspan=Span(10, 11, 12), ispan=Span(10, 11, 12), hispan=Span(11, 12))
assert test.is_small()
test = LicenseMatch(rule=small_rule, qspan=Span(1, 6), ispan=Span(1, 6))
assert test.is_small()
test = LicenseMatch(rule=long_rule, qspan=Span(0, 10), ispan=Span(0, 10), hispan=Span(12))
assert test.is_small()
test = LicenseMatch(rule=long_rule, qspan=Span(5, 10), ispan=Span(5, 10), hispan=Span(5, 6))
assert test.is_small()
test = LicenseMatch(rule=small_rule, qspan=Span(1, 10), ispan=Span(1, 10), hispan=Span(3, 6))
assert not test.is_small()
def test_LicenseMatch_score_is_not_100_with_aho_match_and_extra_unknown_token_hash_match(self):
text = (
'this file is licensed under the GPL license version2 only '
'or any other version. You can redistribute this file under '
'this or any other license.')
r1 = Rule(text_file='r1', license_expression='apache-1.1', stored_text=text)
idx = index.LicenseIndex([r1])
querys = (
'this file is licensed under the GPL license version2 only '
+' big ' +
'or any other version. You can redistribute this file under '
'this or any other license.')
match = idx.match(query_string=querys)[0]
assert match.score() < 100
def test_LicenseMatch_score_is_not_100_with_aho_match_and_extra_unknown_token_seq_match(self):
text = (
'this file is licensed under the GPL license version2 only '
'or any other version. You can redistribute this file under '
'this or any other license.')
r1 = Rule(text_file='r1', license_expression='apache-1.1', stored_text=text)
idx = index.LicenseIndex([r1])
querys = (
'this file is licensed under the GPL license version2 only '
+' is ' +
'or any other version. You can redistribute this file under '
'this or any other license.')
match = idx.match(query_string=querys)[0]
assert match.score() < 100
def test_LicenseMatch_score_is_not_100_with_aho_match_and_extra_unknown_token_aho_match(self):
text = (
'this file is licensed under the GPL license version2 only '
'or any other version. You can redistribute this file under '
'this or any other license.')
r1 = Rule(text_file='r1', license_expression='apache-1.1', stored_text=text)
idx = index.LicenseIndex([r1])
querys = (
'this this file is licensed under the GPL license version2 only '
+' big ' +
'or any other version. You can redistribute this file under '
'this or any other license. that')
match = idx.match(query_string=querys)[0]
assert match.score() < 100
def test_LicenseMatch_matches_only_when_all_key_phrases_are_present(self):
text_r1 = (
'License '
'Distributed under the {{MIT License}}. See LICENSE for {{more information}}.'
'You can redistribute this file under this or any other license.')
r1 = Rule(text_file='r1', license_expression='mit', stored_text=text_r1)
text_r2 = (
'License '
'Distributed under the {{GPL License}} License. See LICENSE for {{more information}}.'
'You can redistribute this file under this or any other license.')
r2 = Rule(text_file='r2', license_expression='gpl', stored_text=text_r2)
idx = index.LicenseIndex([r1, r2])
querys = (
'License '
'Distributed under the Apache License. See LICENSE for more information.'
'You can redistribute this file under this or any other license.')
matches = idx.match(query_string=querys)
assert not matches
def test_LicenseMatch_matches_only_when_all_key_phrases_are_present_in_order(self):
text_r1 = (
'License '
'Distributed under the {{MIT License}}. See LICENSE for more information. '
'{{You can redistribute this file}} under this or any other license. '
)
r1 = Rule(text_file='r1', license_expression='mit', stored_text=text_r1)
text_r2 = 'Foo bar'
r2 = Rule(text_file='r2', license_expression='gpl', stored_text=text_r2)
idx = index.LicenseIndex([r1, r2])
querys = (
'License '
'Distributed under the License MIT. See LICENSE for more information. '
'You can redistribute this file under this or any other license. '
' and otherwise foo bar'
)
matches = idx.match(query_string=querys)
assert len(matches) == 1
assert matches[0].rule == r2
def test_LicenseMatch_matches_only_when_key_phrases_are_uninterrupted_by_unknown(self):
text_r1 = (
'License '
'Distributed under the {{MIT License}}. See LICENSE for more information.'
'You can redistribute this file under this or any other license.')
r1 = Rule(text_file='r1', license_expression='mit', stored_text=text_r1)
text_r2 = (
'License '
'Distributed under the BSD License. See LICENSE for more information.'
'You can redistribute this file under this or any other license.')
r2 = Rule(text_file='r2', license_expression='gpl', stored_text=text_r2)
idx = index.LicenseIndex([r1, r2])
querys = (
'See LICENSE for more information, and also you can redistribute this file under this or any other license.'
'License '
'Distributed under the MIT, foobar License. See LICENSE or website for more information.'
'You can redistribute this file under this or any other license.'
)
matches = idx.match(query_string=querys)
assert len(matches) == 1
assert matches[0].rule == r2
def test_LicenseMatch_matches_only_when_key_phrases_are_uninterrupted_by_stopword(self):
text_r1 = (
'License '
'Distributed under the {{MIT License}}. See LICENSE for more information.'
'You can redistribute this file under this or any other license.')
r1 = Rule(text_file='r1', license_expression='mit', stored_text=text_r1)
text_r2 = (
'License '
'Distributed under the BSD License. See LICENSE for more information.'
'You can redistribute this file under this or any other license.')
r2 = Rule(text_file='r2', license_expression='gpl', stored_text=text_r2)
idx = index.LicenseIndex([r1, r2])
querys = (
'See LICENSE for more information, and also you can redistribute this file under this or any other license.'
'License '
'Distributed under the MIT, a License. See LICENSE or website for more information.'
# ^ stopword ^
'You can redistribute this file under this or any other license.'
)
matches = idx.match(query_string=querys)
assert len(matches) == 1
assert matches[0].rule == r2
def test_LicenseMatch_matches_key_phrases_aho_with_exact_match_selects_key_phrase_match(self):
text_r1 = (
'License '
'Distributed under the {{MIT License}}. See LICENSE for more information.'
)
r1 = Rule(text_file='r1', license_expression='mit', stored_text=text_r1)
text_r2 = (
'License '
'Distributed under the {{BSD License}}. See LICENSE for more information.'
'You can redistribute this file under this or any other license.')
r2 = Rule(text_file='r2', license_expression='bsd', stored_text=text_r2)
idx = index.LicenseIndex([r1, r2])
querys = (
'License '
'Distributed under the MIT License. See LICENSE for more information.'
'You can redistribute this file under this or any other license.'
)
matches = idx.match(query_string=querys, _skip_hash_match=True)
assert len(matches) == 1
assert matches[0].rule == r1
def test_LicenseMatch_matches_only_when_key_phrase_is_uninterrupted(self):
text_r1 = (
'licensed under the '
'{{Creative Commons Attribution 4.0 License}} '
'(the "License"); '
' this is a license with has several interesting characteristics '
)
r1 = Rule(text_file='r1', license_expression='keyphrase', stored_text=text_r1)
text_r2 = (
'licensed under the '
'Creative Commons Attribution 4.0 License '
'(the "License"); '
' this is a license that has several interesting characteristics also '
)
r2 = Rule(text_file='r2', license_expression='plain', stored_text=text_r2)
legalese = set(['licensed', 'license', 'attribution', ])
idx = index.LicenseIndex([r1, r2], _legalese=legalese)
assert r1.key_phrase_spans == [Span(3, 8)]
assert r2.key_phrase_spans == []
# NonCommercial and ShareAlike are "unknown" words here
        # therefore we should match r2 as a sequence and not r1 because the
        # key phrases are interrupted
querys = (
'This work is '
# 0 UW 1
'licensed under the '
# 2 3 4
'Creative Commons Attribution-Share Alike 4.0 License '
# 5 6 7 UW UW 8 9 10
'(the "License"). '
# 11 12
'this is a license that has several interesting characteristics FOO'
# 13 14 SW 15 16 17 18 19 20 UW 21
)
matches = idx.match(query_string=querys)
assert len(matches) == 1
match = matches[0]
assert match.query.unknowns_by_pos == {0: 1, 7: 2, 20: 1}
assert match.qspan == Span(2, 20)
itokens = [idx.tokens_by_tid[i] for i in match.itokens(idx)]
assert itokens == [
'licensed',
'under',
'the',
'creative',
'commons',
'attribution',
'4',
'0',
'license',
'the',
'license',
'this',
'is',
'license',
'that',
'has',
'several',
'interesting',
'characteristics',
]
assert match.rule == r2
class TestMergeMatches(FileBasedTesting):
test_data_dir = TEST_DATA_DIR
def test_merge_does_merge_non_contiguous_matches_in_sequence(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
m2 = LicenseMatch(rule=r1, qspan=Span(4, 6), ispan=Span(4, 6))
m5 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
results = merge_matches([m1, m2, m5])
assert results == [LicenseMatch(rule=r1, qspan=Span(0, 6), ispan=Span(0, 6))]
def test_merge_does_not_merge_overlapping_matches_of_different_rules_with_different_licensing(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl2')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
m2 = LicenseMatch(rule=r2, qspan=Span(1, 6), ispan=Span(1, 6))
results = merge_matches([m1, m2])
assert results == [m1, m2]
def test_merge_does_merge_overlapping_matches_of_same_rules_if_in_sequence(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
m2 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
results = merge_matches([m1, m2])
assert results == [LicenseMatch(rule=r1, qspan=Span(0, 6), ispan=Span(0, 6))]
def test_merge_does_not_merge_overlapping_matches_of_same_rules_if_in_sequence_with_gaps(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r1.length = 50
m1 = LicenseMatch(rule=r1, qspan=Span(1, 3), ispan=Span(1, 3))
m2 = LicenseMatch(rule=r1, qspan=Span(14, 20), ispan=Span(4, 10))
expected = [LicenseMatch(rule=r1, qspan=Span(1, 3) | Span(14, 20), ispan=Span(1, 10))]
results = merge_matches([m1, m2])
assert results == expected
def test_merge_does_not_merge_overlapping_matches_of_same_rules_if_in_sequence_with_gaps_for_long_match(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r1.length = 20
m1 = LicenseMatch(rule=r1, qspan=Span(1, 10), ispan=Span(1, 10))
m2 = LicenseMatch(rule=r1, qspan=Span(14, 20), ispan=Span(14, 20))
expected = [LicenseMatch(rule=r1, qspan=Span(1, 10) | Span(14, 20), ispan=Span(1, 10) | Span(14, 20))]
results = merge_matches([m1, m2])
assert results == expected
def test_merge_does_not_merge_overlapping_matches_of_same_rules_if_in_not_sequence(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(1, 3), ispan=Span(1, 3))
m2 = LicenseMatch(rule=r1, qspan=Span(14, 20), ispan=Span(1, 3))
matches = merge_matches([m1, m2])
assert sorted(matches) == sorted([m1, m2])
def test_merge_does_not_merge_contained_matches_of_different_rules_with_same_licensing(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
m2 = LicenseMatch(rule=r2, qspan=Span(1, 6), ispan=Span(1, 6))
matches = merge_matches([m1, m2])
assert sorted(matches) == sorted([m1, m2])
def test_files_does_filter_contained_matches_of_different_rules_with_same_licensing(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
m2 = LicenseMatch(rule=r2, qspan=Span(1, 6), ispan=Span(1, 6))
matches, discarded = filter_contained_matches([m1, m2])
assert matches == [m2]
assert discarded == [m1]
def test_merge_does_not_merge_overlapping_matches_with_same_licensings(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
overlap = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
same_span1 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
same_span2 = LicenseMatch(rule=r2, qspan=Span(1, 6), ispan=Span(1, 6))
result = merge_matches([overlap, same_span1, same_span2])
expected = [
LicenseMatch(rule=r1, qspan=Span(0, 6), ispan=Span(0, 6)),
LicenseMatch(rule=r2, qspan=Span(1, 6), ispan=Span(1, 6)),
]
assert sorted(result) == sorted(expected)
def test_filter_contained_matches_only_filter_contained_matches_with_same_licensings(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
overlap = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
same_span1 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
same_span2 = LicenseMatch(rule=r2, qspan=Span(1, 6), ispan=Span(1, 6))
matches, discarded = filter_contained_matches([overlap, same_span1, same_span2])
assert matches == [overlap, same_span1]
assert discarded
def test_filter_overlapping_matches_does_filter_overlapping_matches_with_same_licensings(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
overlap = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
same_span1 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
same_span2 = LicenseMatch(rule=r2, qspan=Span(1, 6), ispan=Span(1, 6))
matches, discarded = filter_overlapping_matches([overlap, same_span1, same_span2])
assert matches == [overlap]
assert discarded
def test_filter_contained_matches_prefers_longer_overlapping_matches(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
overlap = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
same_span1 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
same_span2 = LicenseMatch(rule=r2, qspan=Span(1, 8), ispan=Span(1, 8))
matches, discarded = filter_contained_matches([overlap, same_span1, same_span2])
assert matches == [overlap, same_span2]
assert discarded
def test_filter_overlapping_matches_prefers_longer_overlapping_matches(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
overlap = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
same_span1 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
same_span2 = LicenseMatch(rule=r2, qspan=Span(1, 8), ispan=Span(1, 8))
matches, discarded = filter_overlapping_matches([overlap, same_span1, same_span2])
assert matches == [same_span2]
assert discarded
def test_merge_contiguous_touching_matches_in_sequence(self):
r1 = Rule(stored_text='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
m2 = LicenseMatch(rule=r1, qspan=Span(3, 6), ispan=Span(3, 6))
result = merge_matches([m1, m2])
match = result[0]
assert match == LicenseMatch(rule=r1, qspan=Span(0, 6), ispan=Span(0, 6))
def test_merge_contiguous_contained_matches(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
m2 = LicenseMatch(rule=r1, qspan=Span(3, 6), ispan=Span(3, 6))
m5 = LicenseMatch(rule=r1, qspan=Span(7, 8), ispan=Span(7, 8))
result = merge_matches([m1, m2, m5])
assert result == [LicenseMatch(rule=r1, qspan=Span(0, 8), ispan=Span(0, 8))]
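    # --- Added note (illustrative): merge_matches only fuses matches of the
    # same rule whose query and index positions advance together in sequence,
    # e.g. qspans (0, 2) and (3, 6) with matching ispans merge into (0, 6).
    # A repeated ispan at increasing qspans, as in the next test, stays split,
    # since that indicates the rule text occurring several times in the query
    # rather than one long match.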
def test_merge_should_not_merge_repeated_matches_out_of_sequence(self):
rule = Rule(text_file='gpl-2.0_49.RULE', license_expression=u'gpl-2.0')
rule.rid = 2615
m1 = LicenseMatch(rule=rule, matcher='chunk1', qspan=Span(0, 7), ispan=Span(0, 7))
m2 = LicenseMatch(rule=rule, matcher='chunk2', qspan=Span(8, 15), ispan=Span(0, 7))
m3 = LicenseMatch(rule=rule, matcher='chunk3', qspan=Span(16, 23), ispan=Span(0, 7))
result = merge_matches([m1, m2, m3])
assert result == [m1, m2, m3]
def test_merge_merges_contained_and_overlapping_match(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
contained = LicenseMatch(rule=r1, qspan=Span(1, 4), ispan=Span(1, 4))
overlapping = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
assert contained in overlapping
assert contained in m1
result = merge_matches([m1, contained, overlapping])
expected = [LicenseMatch(rule=r1, qspan=Span(0, 6), ispan=Span(0, 6))]
assert result == expected
def test_merge_does_not_merge_multiple_contained_matches_across_rules(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
contained1 = LicenseMatch(rule=r2, qspan=Span(1, 2), ispan=Span(1, 2))
r3 = Rule(text_file='r3', license_expression='apache-2.0 OR gpl')
contained2 = LicenseMatch(rule=r3, qspan=Span(3, 4), ispan=Span(3, 4))
r5 = Rule(text_file='r5', license_expression='apache-2.0 OR gpl')
m5 = LicenseMatch(rule=r5, qspan=Span(1, 6), ispan=Span(1, 6))
result = merge_matches([m1, contained1, contained2, m5])
assert sorted(result) == sorted([m1, contained1, contained2, m5])
def test_filter_contained_matches_does_filter_across_rules(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
contained1 = LicenseMatch(rule=r2, qspan=Span(1, 2), ispan=Span(1, 2))
r3 = Rule(text_file='r3', license_expression='apache-2.0 OR gpl')
contained2 = LicenseMatch(rule=r3, qspan=Span(3, 4), ispan=Span(3, 4))
r5 = Rule(text_file='r5', license_expression='apache-2.0 OR gpl')
m5 = LicenseMatch(rule=r5, qspan=Span(1, 6), ispan=Span(1, 6))
result, _discarded = filter_contained_matches([m1, contained1, contained2, m5])
assert result == [m1, m5]
def test_filter_overlapping_matches_does_not_filter_multiple_contained_matches_across_rules(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
contained1 = LicenseMatch(rule=r2, qspan=Span(1, 2), ispan=Span(1, 2))
r3 = Rule(text_file='r3', license_expression='apache-2.0 OR gpl')
contained2 = LicenseMatch(rule=r3, qspan=Span(3, 4), ispan=Span(3, 4))
r5 = Rule(text_file='r5', license_expression='apache-2.0 OR gpl')
m5 = LicenseMatch(rule=r5, qspan=Span(1, 6), ispan=Span(1, 6))
result, _discarded = filter_overlapping_matches([m1, contained1, contained2, m5])
assert result == [m1]
def test_filter_contained_matches_filters_multiple_contained_matches(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
contained1 = LicenseMatch(rule=r2, qspan=Span(1, 2), ispan=Span(1, 2))
r3 = Rule(text_file='r3', license_expression='apache-2.0 OR gpl')
contained2 = LicenseMatch(rule=r3, qspan=Span(3, 4), ispan=Span(3, 4))
r5 = Rule(text_file='r5', license_expression='apache-2.0 OR gpl')
m5 = LicenseMatch(rule=r5, qspan=Span(1, 6), ispan=Span(1, 6))
matches, discarded = filter_contained_matches([m1, contained1, contained2, m5])
assert matches == [m1, m5]
assert sorted(discarded) == sorted([contained1, contained2, ])
def test_filter_overlapping_matches_filters_multiple_contained_matches(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
contained1 = LicenseMatch(rule=r2, qspan=Span(1, 2), ispan=Span(1, 2))
r3 = Rule(text_file='r3', license_expression='apache-2.0 OR gpl')
contained2 = LicenseMatch(rule=r3, qspan=Span(3, 4), ispan=Span(3, 4))
r5 = Rule(text_file='r5', license_expression='apache-2.0 OR gpl')
m5 = LicenseMatch(rule=r5, qspan=Span(1, 6), ispan=Span(1, 6))
matches, discarded = filter_overlapping_matches([m1, contained1, contained2, m5])
assert matches == [m1]
assert sorted(discarded) == sorted([m5, contained1, contained2, ])
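# The paired tests above illustrate the difference between the two filters:
# filter_contained_matches only discards matches whose qspan is fully
# contained in another match's qspan, while filter_overlapping_matches also
# discards matches that merely overlap a better match (hence m5 survives the
# first filter but not the second).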
def test_merge_does_not_merge_matches_with_same_spans_if_licenses_are_identical_but_rules_differ(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
m5 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
r2 = Rule(text_file='r2', license_expression='apache-2.0')
m2 = LicenseMatch(rule=r2, qspan=Span(0, 2), ispan=Span(0, 2))
matches = merge_matches([m1, m2, m5])
assert sorted(matches) == sorted([LicenseMatch(rule=r1, qspan=Span(0, 6), ispan=Span(0, 6)), m2])
def test_filter_contained_matches_filters_matches_with_same_spans_if_licenses_are_identical_but_rules_differ(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
m5 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
r2 = Rule(text_file='r2', license_expression='apache-2.0')
m2 = LicenseMatch(rule=r2, qspan=Span(0, 2), ispan=Span(0, 2))
matches, discarded = filter_contained_matches([m1, m2, m5])
assert matches == [m1, m5]
assert discarded
def test_filter_overlapping_matches_filters_matches_with_same_spans_if_licenses_are_identical_but_rules_differ(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
m5 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
r2 = Rule(text_file='r2', license_expression='apache-2.0')
m2 = LicenseMatch(rule=r2, qspan=Span(0, 2), ispan=Span(0, 2))
matches, discarded = filter_overlapping_matches([m1, m2, m5])
assert matches == [m5]
assert discarded
def test_merge_then_filter_matches_with_same_spans_if_licenses_are_identical_but_rules_differ(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
m5 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
r2 = Rule(text_file='r2', license_expression='apache-2.0')
m2 = LicenseMatch(rule=r2, qspan=Span(0, 2), ispan=Span(0, 2))
matches = merge_matches([m1, m2, m5])
matches, discarded = filter_contained_matches(matches)
assert matches == [LicenseMatch(rule=r1, qspan=Span(0, 6), ispan=Span(0, 6))]
assert discarded
def test_merge_overlapping_matches(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
m2 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
matches = merge_matches([m1, m2])
assert matches == [LicenseMatch(rule=r1, qspan=Span(0, 6), ispan=Span(0, 6))]
def test_merge_does_not_merge_matches_with_same_spans_if_licenses_are_the_same_but_have_different_license_ordering(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
m5 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
r2 = Rule(text_file='r2', license_expression='gpl OR apache-2.0')
m2 = LicenseMatch(rule=r2, qspan=Span(0, 2), ispan=Span(0, 2))
result = merge_matches([m1, m2, m5])
assert sorted(result) == sorted([LicenseMatch(rule=r1, qspan=Span(0, 6), ispan=Span(0, 6)), m2])
def test_merge_does_not_merge_matches_with_same_spans_if_rules_are_different(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
m5 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
m2 = LicenseMatch(rule=r2, qspan=Span(0, 2), ispan=Span(0, 2))
result = merge_matches([m1, m2, m5])
assert sorted(result) == sorted([LicenseMatch(rule=r1, qspan=Span(0, 6), ispan=Span(0, 6)), m2])
def test_merge_merges_duplicate_matches(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 8), ispan=Span(0, 8))
m2 = LicenseMatch(rule=r1, qspan=Span(0, 8), ispan=Span(0, 8))
matches = merge_matches([m1, m2])
assert (matches == [m1]) or (matches == [m2])
def test_merge_does_not_merge_overlapping_matches_in_sequence_with_asymmetric_overlap(self):
r1 = Rule(text_file='r1', license_expression=u'lgpl-2.0-plus')
# These two matches come from a merge_matches debug trace of lgpl-2.0-plus_9.RULE:
# - current: '3-seq' match, lines=(9, 28), score=87.5, len=126, hilen=20, rlen=144, qreg=(50, 200), ireg=(5, 142)
# - next: '2-aho' match, lines=(28, 44), score=100.0, len=144, hilen=21, rlen=144, qreg=(198, 341), ireg=(0, 143)
# Their query regions barely touch (199-200 vs. 198-341) while their index
# regions overlap asymmetrically; merging them would wrongly produce a single
# match claiming qreg=(50, 341) with ireg=(0, 143), so they must stay separate.
m1 = LicenseMatch(
rule=r1,
qspan=Span(50, 90) | Span(92, 142) | Span(151, 182) | Span(199, 200),
ispan=
Span(5, 21) | Span(23, 46) | Span(48, 77) | Span(79, 93) |
Span(95, 100) | Span(108, 128) | Span(130, 142),
hispan=
Span(10) | Span(14) | Span(18) | Span(24) | Span(27) | Span(52) |
Span(57) | Span(61) | Span(65, 66) | Span(68) | Span(70) | Span(80) |
Span(88) | Span(96) | Span(111) | Span(113) | Span(115) | Span(131) |
Span(141),
)
m2 = LicenseMatch(
rule=r1,
qspan=Span(198, 341),
ispan=Span(0, 143),
hispan=
Span(1) | Span(10) | Span(14) | Span(18) | Span(24) | Span(27) |
Span(52) | Span(57) | Span(61) | Span(65, 66) | Span(68) | Span(70) |
Span(80) | Span(88) | Span(96) | Span(111) | Span(113) | Span(115) |
Span(131) | Span(141))
matches = merge_matches([m1, m2])
assert matches == [m1, m2]
class TestLicenseMatchFilter(FileBasedTesting):
test_data_dir = TEST_DATA_DIR
def test_filter_contained_matches_filters_multiple_nested_contained_matches_and_large_overlapping(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
large_overlap = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
contained = LicenseMatch(rule=r1, qspan=Span(1, 4), ispan=Span(1, 4))
in_contained = LicenseMatch(rule=r1, qspan=Span(2, 3), ispan=Span(2, 3))
result, discarded = filter_contained_matches([m1, contained, in_contained, large_overlap])
assert result == [m1, large_overlap]
assert discarded == [contained, in_contained]
def test_filter_overlapping_matches_filters_multiple_nested_contained_matches_and_large_overlapping(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
large_overlap = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
contained = LicenseMatch(rule=r1, qspan=Span(1, 4), ispan=Span(1, 4))
in_contained = LicenseMatch(rule=r1, qspan=Span(2, 3), ispan=Span(2, 3))
result, discarded = filter_overlapping_matches([m1, contained, in_contained, large_overlap])
assert result == [m1]
assert discarded
def test_filter_matches_filters_non_contiguous_or_overlapping_but_contained_matches(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(1, 2), ispan=Span(1, 2))
m2 = LicenseMatch(rule=r1, qspan=Span(3, 6), ispan=Span(3, 6))
m3 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
m4 = LicenseMatch(rule=r1, qspan=Span(0, 7), ispan=Span(0, 7))
m5 = LicenseMatch(rule=r1, qspan=Span(1, 6), ispan=Span(1, 6))
result, discarded = filter_contained_matches([m1, m2, m3, m4, m5])
assert result == [m4]
assert discarded
def test_filter_matches_filters_non_contiguous_or_overlapping_contained_matches_with_touching_boundaries(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0 OR gpl')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
r2 = Rule(text_file='r2', license_expression='apache-2.0 OR gpl')
m2 = LicenseMatch(rule=r2, qspan=Span(3, 7), ispan=Span(3, 7))
r3 = Rule(text_file='r3', license_expression='apache-2.0 OR gpl')
m3 = LicenseMatch(rule=r3, qspan=Span(0, 6), ispan=Span(0, 6))
r6 = Rule(text_file='r6', license_expression='apache-2.0 OR gpl')
m6 = LicenseMatch(rule=r6, qspan=Span(1, 7), ispan=Span(1, 7))
r5 = Rule(text_file='r5', license_expression='apache-2.0 OR gpl')
m5 = LicenseMatch(rule=r5, qspan=Span(1, 6), ispan=Span(1, 6))
r4 = Rule(text_file='r4', license_expression='apache-2.0 OR gpl')
m4 = LicenseMatch(rule=r4, qspan=Span(0, 7), ispan=Span(0, 7))
result, discarded = filter_contained_matches([m1, m2, m3, m4, m5, m6])
assert result == [m4]
assert discarded
def test_filter_contained_matches_does_filter_matches_with_contained_spans_if_licenses_are_different(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
r2 = Rule(text_file='r2', license_expression='apache-2.0')
m2 = LicenseMatch(rule=r2, qspan=Span(1, 6), ispan=Span(1, 6))
r3 = Rule(text_file='r3', license_expression='apache-1.1')
m3 = LicenseMatch(rule=r3, qspan=Span(0, 2), ispan=Span(0, 2))
matches, discarded = filter_contained_matches([m1, m2, m3])
assert matches == [m1, m2]
assert discarded
def test_filter_overlapping_matches_does_filter_matches_with_contained_spans_if_licenses_are_different(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
r2 = Rule(text_file='r2', license_expression='apache-2.0')
m2 = LicenseMatch(rule=r2, qspan=Span(1, 6), ispan=Span(1, 6))
r3 = Rule(text_file='r3', license_expression='apache-1.1')
m3 = LicenseMatch(rule=r3, qspan=Span(0, 2), ispan=Span(0, 2))
matches, discarded = filter_overlapping_matches([m1, m2, m3])
assert matches == [m2]
assert discarded
def test_filter_overlapping_matches_filters_matches_with_medium_overlap_only_if_licenses_are_the_same(self):
r1 = Rule(text_file='r1', license_expression='apache-1.1')
m1 = LicenseMatch(rule=r1, qspan=Span(0, 10), ispan=Span(0, 10))
m2 = LicenseMatch(rule=r1, qspan=Span(3, 11), ispan=Span(3, 11))
r2 = Rule(text_file='r2', license_expression='gpl OR apache-2.0')
m3 = LicenseMatch(rule=r2, qspan=Span(7, 15), ispan=Span(7, 15))
result, discarded = filter_overlapping_matches([m1, m2, m3])
assert sorted(result) == sorted([m1, m3])
assert discarded
def test_filter_matches_handles_interlaced_matches_with_overlap_and_same_license(self):
rule_dir = self.get_test_loc('match_filter/rules')
idx = index.LicenseIndex(load_rules(rule_dir))
rules = {r.identifier: r for r in idx.rules_by_rid}
query_loc = self.get_test_loc('match_filter/query')
matches = idx.match(location=query_loc)
expected = [
# filtered: LicenseMatch(matcher='3-seq', rule=rules['rule1.RULE'], qspan=Span(4, 47) | Span(50, 59), ispan=Span(1, 53)),
LicenseMatch(matcher='2-aho', rule=rules['rule2.RULE'], qspan=Span(24, 85), ispan=Span(0, 61)),
]
assert matches == expected
def test_filter_contained_matches_does_not_discard_non_overlapping(self):
r1 = Rule(text_file='r1', license_expression='apache-1.1')
r2 = Rule(text_file='r2', license_expression='gpl OR apache-2.0')
r3 = Rule(text_file='r3', license_expression='gpl')
# we have these matches
# 1. ABC
# 2. ABCDEDFG
# 3. DEFCGJLJLJKLJJLKJLJJJLJLJLJJL
# we do not want match 1. to be discarded in the final results
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
m2 = LicenseMatch(rule=r2, qspan=Span(0, 40), ispan=Span(0, 40))
m3 = LicenseMatch(rule=r3, qspan=Span(6, 120), ispan=Span(6, 120))
result, discarded = filter_contained_matches([m2, m1, m3])
assert result == [m2, m3]
assert discarded == [m1]
def test_filter_overlapping_matches_does_not_discard_non_overlapping(self):
r1 = Rule(text_file='r1', license_expression='apache-1.1')
r2 = Rule(text_file='r2', license_expression='gpl OR apache-2.0')
r3 = Rule(text_file='r3', license_expression='gpl')
# we have these matches
# 1. ABC
# 2. ABCDEDFG
# 3. DEFCGJLJLJKLJJLKJLJJJLJLJLJJL
# we do not want match 1. to be discarded in the final results
m1 = LicenseMatch(rule=r1, qspan=Span(0, 5), ispan=Span(0, 5))
m2 = LicenseMatch(rule=r2, qspan=Span(0, 40), ispan=Span(0, 40))
m3 = LicenseMatch(rule=r3, qspan=Span(6, 120), ispan=Span(6, 120))
result, discarded = filter_overlapping_matches([m2, m1, m3])
assert result == [m3]
assert discarded == [m1, m2]
result, discarded = restore_non_overlapping(result, discarded)
assert result == [m1]
assert discarded == [m2]
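# restore_non_overlapping moves back, from the discarded pile, matches that
# do not overlap any kept match: m1 (qspan 0-5) does not touch m3 (qspan
# 6-120) and is restored, while m2 (qspan 0-40) overlaps m3 and stays discarded.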
def test_filter_key_phrases_keeps_matches_where_key_phrase_spans_are_fully_contained_in_ispan(self):
idx = index.LicenseIndex()
query = Query(query_string="Lorum ipsum", idx=idx)
r1 = Rule(text_file='r1', license_expression='apache-1.1', key_phrase_spans=[Span(2, 4)])
match_key_phrase_fully_contained = LicenseMatch(rule=r1, query=query, qspan=Span(0, 5), ispan=Span(0, 5))
match_key_phrase_fully_outside = LicenseMatch(rule=r1, query=query, qspan=Span(5, 8), ispan=Span(5, 8))
match_key_phrase_partially_contained = LicenseMatch(rule=r1, query=query, qspan=Span(0, 3), ispan=Span(0, 2))
match_key_phrase_fully_containing = LicenseMatch(rule=r1, query=query, qspan=Span(3), ispan=Span(3))
kept, discarded = filter_matches_missing_key_phrases([
match_key_phrase_fully_contained,
match_key_phrase_fully_outside,
match_key_phrase_partially_contained,
match_key_phrase_fully_containing
])
assert kept == [
match_key_phrase_fully_contained
]
assert discarded == [
match_key_phrase_fully_outside,
match_key_phrase_partially_contained,
match_key_phrase_fully_containing
]
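# Note: a match survives filter_matches_missing_key_phrases only when every
# key phrase span declared on its rule is fully contained in the match ispan;
# matches missing the key phrase, or covering it only partially, are discarded.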
def test_filter_key_phrases_discards_matches_where_qspan_intersects_with_unknown_or_stopwords(self):
idx = index.LicenseIndex()
query = Query(query_string="Lorum ipsum", idx=idx)
query.unknowns_by_pos = {12: 1}
query.stopwords_by_pos = {23: 1}
r1 = Rule(text_file='r1', license_expression='apache-1.1', key_phrase_spans=[Span(2, 4)])
match_key_phrase_fully_contained = LicenseMatch(rule=r1, query=query, qspan=Span(0, 5), ispan=Span(0, 5))
match_qspan_intersects_with_unknowns = LicenseMatch(rule=r1, query=query, qspan=Span(10, 15), ispan=Span(0, 5))
match_qspan_intersects_with_stopwords = LicenseMatch(rule=r1, query=query, qspan=Span(20, 25), ispan=Span(0, 5))
kept, discarded = filter_matches_missing_key_phrases([
match_key_phrase_fully_contained,
match_qspan_intersects_with_unknowns,
match_qspan_intersects_with_stopwords,
])
assert kept == [
match_key_phrase_fully_contained
]
assert discarded == [
match_qspan_intersects_with_unknowns,
match_qspan_intersects_with_stopwords
]
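# Note: as this test exercises, a match is also discarded when positions in
# its qspan coincide with unknown tokens or stopwords recorded on the query
# (position 12 falls in qspan 10-15, position 23 in qspan 20-25), which
# suggests the key phrase was interrupted by unmatched text in the query.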
def test_filter_key_phrases_discards_matches_where_key_phrase_is_interrupted_in_qspan(self):
idx = index.LicenseIndex()
query = Query(query_string="Lorum ipsum", idx=idx)
query.unknowns_by_pos = {}
r1 = Rule(
text_file='r1',
license_expression='apache-1.1',
key_phrase_spans=[Span(12, 14)],
)
qspan_ispan_same_pos = LicenseMatch(
rule=r1, query=query,
qspan=Span(10, 15), ispan=Span(10, 15)
)
qspan_with_offset = LicenseMatch(
rule=r1, query=query,
qspan=Span(20, 25), ispan=Span(10, 15)
)
qspan_non_contiguous = LicenseMatch(
rule=r1, query=query,
qspan=Span([20, 21, 22, 23, 25]), ispan=Span(10, 15)
)
kept, discarded = filter_matches_missing_key_phrases([
qspan_ispan_same_pos,
qspan_with_offset,
qspan_non_contiguous
])
assert kept == [
qspan_ispan_same_pos,
qspan_with_offset
]
assert discarded == [
qspan_non_contiguous,
]
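# Note: the key phrase must also be contiguous in the query: qspan_non_contiguous
# covers the key phrase region but skips query position 24, so the phrase is
# interrupted there and the match is discarded.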
def test_get_matching_regions_15_words(self):
rule_dir = self.get_test_loc('match_regions/rules')
idx = index.LicenseIndex(load_rules(rule_dir))
query_string = '''GPLv2
This source code is licensed under the MIT
GPLv2
under both the GPLv2 and Apache 2.0 License
the under both the under both the under both the under both the under both
GPL v2 license
This source code is licensed under the MIT
'''
matches = idx.match(query_string=query_string)
matched_rules = [m.rule.identifier for m in matches]
expected_rules = [
'gpl-2.0_bare_single_word.RULE',
'mit_101.RULE',
'gpl-2.0_bare_single_word.RULE',
'gpl-2.0_or_apache-2.0_2.RULE',
'gpl-2.0_bare_single_word2.RULE',
'mit_101.RULE',
]
assert matched_rules == expected_rules
regions = get_matching_regions(matches)
expected_regions = [Span(0, 18), Span(34, 44)]
assert regions == expected_regions
assert matches[0].qspan in regions[0]
assert matches[1].qspan in regions[0]
assert matches[2].qspan in regions[0]
assert matches[3].qspan in regions[0]
assert matches[4].qspan in regions[1]
assert matches[5].qspan in regions[1]
def test_get_matching_regions_10_words_are_not_enough(self):
rule_dir = self.get_test_loc('match_regions/rules')
idx = index.LicenseIndex(load_rules(rule_dir))
query_string = '''GPLv2
This source code is licensed under the MIT
GPLv2
under both the GPLv2 and Apache 2.0 License
the under both the under foo bar both the under
GPL v2 license
This source code is licensed under the MIT
'''
matches = idx.match(query_string=query_string)
matched_rules = [m.rule.identifier for m in matches]
expected_rules = [
'gpl-2.0_bare_single_word.RULE',
'mit_101.RULE',
'gpl-2.0_bare_single_word.RULE',
'gpl-2.0_or_apache-2.0_2.RULE',
'gpl-2.0_bare_single_word2.RULE',
'mit_101.RULE',
]
assert matched_rules == expected_rules
regions = get_matching_regions(matches)
expected_regions = [Span(0, 37)]
assert regions == expected_regions
def test_get_matching_regions_11_words_are_enough(self):
rule_dir = self.get_test_loc('match_regions/rules')
idx = index.LicenseIndex(load_rules(rule_dir))
query_string = '''GPLv2
This source code is licensed under the MIT
GPLv2
under both the GPLv2 and Apache 2.0 License
the under both the under both the under both the under
GPL v2 license
This source code is licensed under the MIT
'''
matches = idx.match(query_string=query_string)
matched_rules = [m.rule.identifier for m in matches]
expected_rules = [
'gpl-2.0_bare_single_word.RULE',
'mit_101.RULE',
'gpl-2.0_bare_single_word.RULE',
'gpl-2.0_or_apache-2.0_2.RULE',
'gpl-2.0_bare_single_word2.RULE',
'mit_101.RULE',
]
assert matched_rules == expected_rules
regions = get_matching_regions(matches)
expected_regions = [Span(0, 18), Span(30, 40)]
assert regions == expected_regions
assert matches[0].qspan in regions[0]
assert matches[1].qspan in regions[0]
assert matches[2].qspan in regions[0]
assert matches[3].qspan in regions[0]
assert matches[4].qspan in regions[1]
assert matches[5].qspan in regions[1]
def test_get_matching_regions_2_lines_are_not_enough(self):
rule_dir = self.get_test_loc('match_regions/rules')
idx = index.LicenseIndex(load_rules(rule_dir))
query_string = '''GPLv2
This source code is licensed under the MIT
GPLv2
under both the GPLv2 and Apache 2.0 License
one
two
GPL v2 license
This source code is licensed under the MIT
'''
matches = idx.match(query_string=query_string)
matched_rules = [m.rule.identifier for m in matches]
expected_rules = [
'gpl-2.0_bare_single_word.RULE',
'mit_101.RULE',
'gpl-2.0_bare_single_word.RULE',
'gpl-2.0_or_apache-2.0_2.RULE',
'gpl-2.0_bare_single_word2.RULE',
'mit_101.RULE',
]
assert matched_rules == expected_rules
regions = get_matching_regions(matches)
expected_regions = [Span(0, 29)]
assert regions == expected_regions
def test_get_matching_regions_2_lines_with_10_words_are_enough(self):
rule_dir = self.get_test_loc('match_regions/rules')
idx = index.LicenseIndex(load_rules(rule_dir))
query_string = '''GPLv2
This source code is licensed under the MIT
GPLv2
under both the GPLv2 and Apache 2.0 License
one
two three four five six seven eight nine ten
GPL v2 license
This source code is licensed under the MIT
'''
matches = idx.match(query_string=query_string)
matched_rules = [m.rule.identifier for m in matches]
expected_rules = [
'gpl-2.0_bare_single_word.RULE',
'mit_101.RULE',
'gpl-2.0_bare_single_word.RULE',
'gpl-2.0_or_apache-2.0_2.RULE',
'gpl-2.0_bare_single_word2.RULE',
'mit_101.RULE',
]
assert matched_rules == expected_rules
regions = get_matching_regions(matches)
expected_regions = [Span(0, 29)]
assert regions == expected_regions
def test_get_matching_regions_3_lines_enough(self):
rule_dir = self.get_test_loc('match_regions/rules')
idx = index.LicenseIndex(load_rules(rule_dir))
query_string = '''GPLv2
This source code is licensed under the MIT
GPLv2
under both the GPLv2 and Apache 2.0 License
one
two
three
GPL v2 license
This source code is licensed under the MIT
'''
matches = idx.match(query_string=query_string)
matched_rules = [m.rule.identifier for m in matches]
expected_rules = [
'gpl-2.0_bare_single_word.RULE',
'mit_101.RULE',
'gpl-2.0_bare_single_word.RULE',
'gpl-2.0_or_apache-2.0_2.RULE',
'gpl-2.0_bare_single_word2.RULE',
'mit_101.RULE',
]
assert matched_rules == expected_rules
regions = get_matching_regions(matches)
expected_regions = [Span(0, 18), Span(19, 29)]
assert regions == expected_regions
assert matches[0].qspan in regions[0]
assert matches[1].qspan in regions[0]
assert matches[2].qspan in regions[0]
assert matches[3].qspan in regions[0]
assert matches[4].qspan in regions[1]
assert matches[5].qspan in regions[1]
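# Taken together, the tests above probe the gap thresholds used by
# get_matching_regions: matches separated by enough unmatched words (11 here,
# but not 10) or enough unmatched lines (3 here, but not 2) are reported as
# distinct regions, and each match qspan falls inside exactly one region.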
class TestLicenseMatchScore(FileBasedTesting):
test_data_dir = TEST_DATA_DIR
def test_LicenseMatch_score_100(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
r1.relevance = 100
r1.length = 3
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
assert m1.score() == 100
def test_LicenseMatch_score_50(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
r1.relevance = 50
r1.length = 3
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
assert m1.score() == 50
def test_LicenseMatch_score_25_with_stored_relevance(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
r1.relevance = 50
r1.length = 6
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
# NB we do not have a query here
assert m1.score() == 25
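# Note: with a stored relevance of 50 and only 3 of the rule's 6 tokens
# matched, the score works out to relevance scaled by coverage: 50 * 3/6 = 25.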
def test_LicenseMatch_score_0(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
r1.relevance = 0
r1.length = 6
m1 = LicenseMatch(rule=r1, qspan=Span(), ispan=Span())
assert m1.score() == 0
def test_LicenseMatch_score_0_relevance(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
r1.relevance = 0
r1.length = 6
m1 = LicenseMatch(rule=r1, qspan=Span(0, 2), ispan=Span(0, 2))
assert m1.score() == 0
def test_LicenseMatch_score_100_contiguous(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
r1.relevance = 100
r1.length = 42
m1 = LicenseMatch(rule=r1, qspan=Span(0, 41), ispan=Span(0, 41))
assert m1.score() == 100
def test_LicenseMatch_score_100_non_contiguous(self):
r1 = Rule(text_file='r1', license_expression='apache-2.0')
r1.relevance = 100
r1.length = 42
m1 = LicenseMatch(rule=r1, qspan=Span(0, 19) | Span(30, 51), ispan=Span(0, 41))
assert m1.score() == 80.77
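# Note: the 42 matched positions sit in a query region spanning 52 positions
# (0..19 plus 30..51), so the score is reduced by this sparseness:
# 100 * 42 / 52 == 80.77 (rounded), matching the assertion above.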
def test_LicenseMatch_stopwords_are_treated_as_unknown_2484(self):
rules_dir = self.get_test_loc('stopwords/index/rules')
lics_dir = self.get_test_loc('stopwords/index/licenses')
rules = models.get_rules(licenses_data_dir=lics_dir, rules_data_dir=rules_dir)
idx = LicenseIndex(rules)
query_location = self.get_test_loc('stopwords/query.txt')
matches = idx.match(location=query_location)
results = [m.rule.identifier for m in matches]
assert results == ['gpl-1.0.bare.RULE', 'gpl-1.0.bare.RULE', 'gpl-1.0.bare.RULE']
class TestCollectLicenseMatchTexts(FileBasedTesting):
test_data_dir = TEST_DATA_DIR
def test_get_full_matched_text_base(self):
rule_text = u'''
Copyright [[some copyright]]
THIS IS FROM [[THE CODEHAUS]] AND CONTRIBUTORS
IN NO EVENT SHALL [[THE CODEHAUS]] OR ITS CONTRIBUTORS BE LIABLE
EVEN IF ADVISED OF THE [[POSSIBILITY OF SUCH]] DAMAGE
'''
rule = Rule(stored_text=rule_text, license_expression='test')
idx = index.LicenseIndex([rule])
querys = u'''
foobar 45 . Copyright 2003 (C) James. All Rights Reserved.
THIS IS FROM THE CODEHAUS AND CONTRIBUTORS
IN NO EVENT SHALL THE best CODEHAUS OR ITS CONTRIBUTORS BE LIABLE
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. chabada DAMAGE 12 ABC dasdasda .
'''
result = idx.match(query_string=querys)
assert len(result) == 1
match = result[0]
# Note that there is a trailing space in that string
expected = u"""Copyright [2003] ([C]) [James]. [All] [Rights] [Reserved].
THIS IS FROM THE CODEHAUS AND CONTRIBUTORS
IN NO EVENT SHALL THE [best] CODEHAUS OR ITS CONTRIBUTORS BE LIABLE
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """
matched_text = u''.join(
get_full_matched_text(match, query_string=querys, idx=idx, _usecache=False))
assert matched_text == expected
expected_nh = u"""Copyright 2003 (C) James. All Rights Reserved.
THIS IS FROM THE CODEHAUS AND CONTRIBUTORS
IN NO EVENT SHALL THE best CODEHAUS OR ITS CONTRIBUTORS BE LIABLE
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """
matched_text_nh = u''.join(
get_full_matched_text(
match, query_string=querys, idx=idx, _usecache=False, highlight=False))
assert matched_text_nh == expected_nh
expected_origin_text = u"""Copyright 2003 (C) James. All Rights Reserved.
THIS IS FROM THE CODEHAUS AND CONTRIBUTORS
IN NO EVENT SHALL THE best CODEHAUS OR ITS CONTRIBUTORS BE LIABLE
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """
origin_matched_text = u''.join(get_full_matched_text(
match,
query_string=querys,
idx=idx,
highlight_not_matched=u'%s',
))
assert origin_matched_text == expected_origin_text
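# Note: in the expected texts above, tokens that did not match the rule are
# wrapped by the highlight_not_matched template (the default appears to be
# u'[%s]', hence the bracketed words); highlight=False or a plain u'%s'
# template renders the original text unchanged.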
def test_get_full_matched_text(self):
rule_text = u'''
Copyright [[some copyright]]
THIS IS FROM [[THE CODEHAUS]] AND CONTRIBUTORS
IN NO EVENT SHALL [[THE CODEHAUS]] OR ITS CONTRIBUTORS BE LIABLE
EVEN IF ADVISED OF THE [[POSSIBILITY OF SUCH]] DAMAGE
'''
rule = Rule(stored_text=rule_text, license_expression='test')
idx = index.LicenseIndex([rule])
querys = u'''
foobar 45 Copyright 2003 (C) James. All Rights Reserved.
THIS IS FROM THE CODEHAUS AND CONTRIBUTORS
IN NO EVENT SHALL THE best CODEHAUS OR ITS CONTRIBUTORS BE LIABLE
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. chabada DAMAGE 12 ABC
'''
result = idx.match(query_string=querys)
assert len(result) == 1
match = result[0]
# Note that there is a trailing space in that string
expected = u"""Copyright [2003] ([C]) [James]. [All] [Rights] [Reserved].
THIS IS FROM THE CODEHAUS AND CONTRIBUTORS
IN NO EVENT SHALL THE [best] CODEHAUS OR ITS CONTRIBUTORS BE LIABLE
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """
matched_text = u''.join(get_full_matched_text(match, query_string=querys, idx=idx, _usecache=False))
assert matched_text == expected
# the final text is rstripped by matched_text()
matched_text = match.matched_text(_usecache=False)
assert matched_text == expected.rstrip()
# test again using HTML-like tags in the highlight template
# Note that there is a trailing space in that string
expected = u"""Copyright <br>2003</br> (<br>C</br>) <br>James</br>. <br>All</br> <br>Rights</br> <br>Reserved</br>.
THIS IS FROM THE CODEHAUS AND CONTRIBUTORS
IN NO EVENT SHALL THE <br>best</br> CODEHAUS OR ITS CONTRIBUTORS BE LIABLE
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """
matched_text = u''.join(get_full_matched_text(
match, query_string=querys, idx=idx, highlight_not_matched=u'<br>%s</br>', _usecache=False))
assert matched_text == expected
# test again using whole_lines
expected = u""" foobar 45 Copyright 2003 (C) James. All Rights Reserved.
THIS IS FROM THE CODEHAUS AND CONTRIBUTORS
IN NO EVENT SHALL THE best CODEHAUS OR ITS CONTRIBUTORS BE LIABLE
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. chabada DAMAGE 12 ABC\n"""
matched_text = u''.join(get_full_matched_text(
match, query_string=querys, idx=idx, highlight_not_matched=u'%s', whole_lines=True))
assert matched_text == expected
def test_get_full_matched_text_does_not_munge_underscore(self):
rule_text = 'MODULE_LICENSE_GPL'
rule = Rule(stored_text=rule_text, license_expression='test')
idx = index.LicenseIndex([rule])
querys = 'MODULE_LICENSE_GPL'
result = idx.match(query_string=querys)
assert len(result) == 1
match = result[0]
expected = 'MODULE_LICENSE_GPL'
matched_text = u''.join(get_full_matched_text(match, query_string=querys, idx=idx, _usecache=False))
assert matched_text == expected
def test_get_full_matched_text_does_not_munge_plus(self):
rule_text = 'MODULE_LICENSE_GPL+ +'
rule = Rule(stored_text=rule_text, license_expression='test')
idx = index.LicenseIndex([rule])
querys = 'MODULE_LICENSE_GPL+ +'
result = idx.match(query_string=querys)
assert len(result) == 1
match = result[0]
expected = 'MODULE_LICENSE_GPL+ +\n'
matched_text = u''.join(get_full_matched_text(match, query_string=querys, idx=idx, _usecache=False))
assert matched_text == expected
def test_tokenize_matched_text_does_cache_last_call_from_query_string_and_location(self):
dictionary = {'module': 0, 'license': 1, 'gpl+': 2}
location = None
query_string = 'the MODULE_LICENSE_GPL+ foobar'
result1 = tokenize_matched_text(location, query_string, dictionary)
result2 = tokenize_matched_text(location, query_string, dictionary)
assert result2 is result1
location = self.get_test_loc('matched_text/tokenize_matched_text_query.txt')
query_string = None
result3 = tokenize_matched_text(location, query_string, dictionary)
assert result3 is not result2
assert result3 == result2
result4 = tokenize_matched_text(location, query_string, dictionary)
assert result4 is result3
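# Note: tokenize_matched_text memoizes its last call: repeating the same
# (location, query_string) arguments returns the very same list object, while
# changing them recomputes an equal but distinct result.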
def test_tokenize_matched_text_does_return_correct_tokens(self):
querys = u'''
foobar 45 Copyright 2003 (C) James. All Rights Reserved. THIS
IS FROM THE CODEHAUS AND CONTRIBUTORS
'''
dictionary = dict(this=0, event=1, possibility=2, reserved=3, liable=5, copyright=6)
result = tokenize_matched_text(location=None, query_string=querys, dictionary=dictionary)
expected = [
Token(value=u'\n', line_num=1, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'foobar', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'45', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'Copyright', line_num=2, pos=0, is_text=True, is_matched=False, is_known=True),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'2003', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' (', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'C', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u') ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'James', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u'. ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'All', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'Rights', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'Reserved', line_num=2, pos=1, is_text=True, is_matched=False, is_known=True),
Token(value=u'. ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'THIS', line_num=2, pos=2, is_text=True, is_matched=False, is_known=True),
Token(value=u'\n', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u' ', line_num=3, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'IS', line_num=3, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=3, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'FROM', line_num=3, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=3, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'THE', line_num=3, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=3, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'CODEHAUS', line_num=3, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=3, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'AND', line_num=3, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=3, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'CONTRIBUTORS', line_num=3, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u'\n', line_num=3, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=' \n', line_num=4, pos=-1, is_text=False, is_matched=False, is_known=False)
]
assert result == expected
def test_tokenize_matched_text_does_not_crash_on_turkish_unicode(self):
querys = u'İrəli'
result = tokenize_matched_text(location=None, query_string=querys, dictionary={})
expected = [
Token(value='i', line_num=1, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value='rəli', line_num=1, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value='\n', line_num=1, pos=-1, is_text=False, is_matched=False, is_known=False),
]
assert result == expected
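# Note: the Turkish dotted capital İ lowercases to 'i' followed by a
# combining dot above (U+0307); the combining mark does not survive
# tokenization, so 'İrəli' yields the two tokens 'i' and 'rəli' here rather
# than crashing on the non-ASCII input.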
def test_tokenize_matched_text_behaves_like_query_tokenizer_on_turkish_unicode(self):
from licensedcode.tokenize import query_tokenizer
querys = u'İrəli'
matched_text_result = tokenize_matched_text(location=None, query_string=querys, dictionary={})
matched_text_result = [t.value for t in matched_text_result]
query_tokenizer_result = list(query_tokenizer(querys))
if matched_text_result[-1] == '\n':
matched_text_result = matched_text_result[:-1]
assert matched_text_result == query_tokenizer_result
def test_reportable_tokens_filter_tokens_does_not_strip_last_token_value(self):
tokens = [
Token(value=u'\n', line_num=1, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'foobar', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'45', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'Copyright', line_num=2, pos=0, is_text=True, is_matched=False, is_known=True),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'2003', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' (', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'C', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u') ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'James', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u'. ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'All', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'Rights', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'Reserved', line_num=2, pos=1, is_text=True, is_matched=False, is_known=True),
Token(value=u'. ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'THIS', line_num=2, pos=2, is_text=True, is_matched=False, is_known=True),
Token(value=u'\n', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u' ', line_num=3, pos=-1, is_text=False, is_matched=False, is_known=False),
]
match_qspan = Span(0, 1)
result = list(reportable_tokens(tokens, match_qspan, start_line=1, end_line=2, whole_lines=False))
expected = [
Token(value=u'Copyright', line_num=2, pos=0, is_text=True, is_matched=True, is_known=True),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'2003', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' (', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'C', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u') ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'James', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u'. ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'All', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'Rights', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'Reserved', line_num=2, pos=1, is_text=True, is_matched=True, is_known=True),
Token(value=u'. ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False)
]
assert result == expected
# test again with whole lines
match_qspan = Span(0, 1)
result = list(reportable_tokens(tokens, match_qspan, start_line=1, end_line=2, whole_lines=True))
expected = [
Token(value=u'\n', line_num=1, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'foobar', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'45', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'Copyright', line_num=2, pos=0, is_text=True, is_matched=True, is_known=True),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'2003', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' (', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'C', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u') ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'James', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u'. ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'All', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'Rights', line_num=2, pos=-1, is_text=True, is_matched=False, is_known=False),
Token(value=u' ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'Reserved', line_num=2, pos=1, is_text=True, is_matched=True, is_known=True),
Token(value=u'. ', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False),
Token(value=u'THIS', line_num=2, pos=2, is_text=True, is_matched=False, is_known=True),
Token(value=u'\n', line_num=2, pos=-1, is_text=False, is_matched=False, is_known=False)]
assert result == expected
def test_matched_text_is_collected_correctly_end2end(self):
rules_data_dir = self.get_test_loc('matched_text/index/rules')
query_location = self.get_test_loc('matched_text/query.txt')
rules = models.load_rules(rules_data_dir)
idx = LicenseIndex(rules)
results = [match.matched_text(_usecache=False) for match in idx.match(location=query_location)]
expected = [
'This source code is licensed under both the Apache 2.0 license '
'(found in the\n# LICENSE',
'This source code is licensed under [both] [the] [Apache] [2].[0] license '
'(found in the\n# LICENSE file in the root directory of this source tree)',
'GPLv2 ('
]
assert results == expected
def check_matched_texts(self, test_loc, expected_texts, whole_lines=True):
idx = cache.get_index()
test_loc = self.get_test_loc(test_loc)
matches = idx.match(location=test_loc)
matched_texts = [
m.matched_text(whole_lines=whole_lines, highlight=False, _usecache=False)
for m in matches
]
assert matched_texts == expected_texts
def test_matched_text_is_collected_correctly_end2end_for_spdx_match_whole_lines(self):
self.check_matched_texts(
test_loc='matched_text/spdx/query.txt',
expected_texts=['@REM # SPDX-License-Identifier: BSD-2-Clause-Patent'],
whole_lines=True
)
def test_matched_text_is_collected_correctly_end2end_for_spdx_match_plain(self):
self.check_matched_texts(
test_loc='matched_text/spdx/query.txt',
expected_texts=['SPDX-License-Identifier: BSD-2-Clause-Patent'],
whole_lines=False
)
def test_matched_text_is_not_truncated_with_unicode_diacritic_input_from_query(self):
idx = cache.get_index()
querys_with_diacritic_unicode = 'İ license MIT'
result = idx.match(query_string=querys_with_diacritic_unicode)
assert len(result) == 1
match = result[0]
expected = 'license MIT'
matched_text = match.matched_text(_usecache=False,)
assert matched_text == expected
def test_matched_text_is_not_truncated_with_unicode_diacritic_input_from_file(self):
idx = cache.get_index()
file_with_diacritic_unicode_location = self.get_test_loc('matched_text/unicode_text/main3.js')
result = idx.match(location=file_with_diacritic_unicode_location)
assert len(result) == 1
match = result[0]
expected = 'license MIT'
matched_text = match.matched_text(_usecache=False)
assert matched_text == expected
def test_matched_text_is_not_truncated_with_unicode_diacritic_input_from_query_whole_lines(self):
idx = cache.get_index()
querys_with_diacritic_unicode = 'İ license MIT'
result = idx.match(query_string=querys_with_diacritic_unicode)
assert len(result) == 1
match = result[0]
expected = '[İ] license MIT'
matched_text = match.matched_text(_usecache=False, whole_lines=True)
assert matched_text == expected
def test_matched_text_is_not_truncated_with_unicode_diacritic_input_with_diacritic_in_rules(self):
rule_dir = self.get_test_loc('matched_text/turkish_unicode/rules')
idx = index.LicenseIndex(load_rules(rule_dir))
query_loc = self.get_test_loc('matched_text/turkish_unicode/query')
matches = idx.match(location=query_loc)
matched_texts = [
m.matched_text(whole_lines=False, highlight=False, _usecache=False)
for m in matches
]
expected = [
'Licensed under the Apache License, Version 2.0\r\nnext_label=irəli',
'İ license MIT',
'İ license MIT',
'Licensed under the Apache License, Version 2.0\r\nnext_label=irəli',
'lİcense mit'
]
assert matched_texts == expected
def test_matched_text_is_not_truncated_with_unicode_diacritic_input_and_full_index(self):
expected = [
'Licensed under the Apache License, Version 2.0',
'license MIT',
'license MIT',
'Licensed under the Apache License, Version 2.0'
]
self.check_matched_texts(
test_loc='matched_text/turkish_unicode/query',
expected_texts=expected,
whole_lines=False
)
def test_matched_text_does_not_ignore_whole_lines_in_binary_with_small_index(self):
rule_dir = self.get_test_loc('matched_text/binary_text/rules')
idx = index.LicenseIndex(load_rules(rule_dir))
query_loc = self.get_test_loc('matched_text/binary_text/gosu')
matches = idx.match(location=query_loc)
matched_texts = [
m.matched_text(whole_lines=True, highlight=False, _usecache=False)
for m in matches
]
expected = ['{{ .Self }} license: GPL-3 (full text at https://github.com/tianon/gosu)']
assert matched_texts == expected
def test_matched_text_does_not_ignore_whole_lines_in_binary_against_full_index(self):
expected = ['{{ .Self }} license: GPL-3 (full text at https://github.com/tianon/gosu)']
self.check_matched_texts(
test_loc='matched_text/binary_text/gosu',
expected_texts=expected,
whole_lines=True,
)
def test_matched_text_is_collected_correctly_in_binary_ffmpeg_windows_whole_lines(self):
expected_texts = [
'--enable-gpl --enable-version3 --enable-dxva2 --enable-libmfx --enable-nvenc '
'--enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r '
'--enable-gnutls --enable-iconv --enable-libass --enable-libbluray '
'--enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme '
'--enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame '
'--enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 '
'--enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy '
'--enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame '
'--enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis '
'--enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 '
'--enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg '
'--enable-lzma --enable-decklink --enable-zlib',
'%sconfiguration: --enable-gpl --enable-version3 --enable-dxva2 '
'--enable-libmfx --enable-nvenc --enable-avisynth --enable-bzlib '
'--enable-fontconfig --enable-frei0r --enable-gnutls --enable-iconv '
'--enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca '
'--enable-libfreetype --enable-libgme --enable-libgsm --enable-libilbc '
'--enable-libmodplug --enable-libmp3lame --enable-libopencore-amrnb '
'--enable-libopencore-amrwb --enable-libopenh264 --enable-libopenjpeg '
'--enable-libopus --enable-librtmp --enable-libsnappy --enable-libsoxr '
'--enable-libspeex --enable-libtheora --enable-libtwolame --enable-libvidstab '
'--enable-libvo-amrwbenc --enable-libvorbis --enable-libvpx '
'--enable-libwavpack --enable-libwebp --enable-libx264 --enable-libx265 '
'--enable-libxavs --enable-libxvid --enable-libzimg --enable-lzma '
'--enable-decklink --enable-zlib',
'%s is free software; you can redistribute it and/or modify\n'
'it under the terms of the GNU General Public License as published by\n'
'the Free Software Foundation; either version 3 of the License, or\n'
'(at your option) any later version.\n'
'%s is distributed in the hope that it will be useful,\n'
'but WITHOUT ANY WARRANTY; without even the implied warranty of\n'
'MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n'
'GNU General Public License for more details.\n'
'You should have received a copy of the GNU General Public License\n'
'along with %s. If not, see <http://www.gnu.org/licenses/>.\n'
'File formats:\n'
'D. = Demuxing supported\n'
'.E = Muxing supported\n'
'%s%s %-15s %s\n'
'Devices:\n'
'Codecs:\n'
'D..... = Decoding supported\n'
'.E.... = Encoding supported\n'
'..V... = Video codec\n'
"No option name near '%s'\n"
"Unable to parse '%s': %s\n"
"Setting '%s' to value '%s'\n"
"Option '%s' not found\n"
'--enable-gpl --enable-version3 --enable-dxva2 --enable-libmfx --enable-nvenc '
'--enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r '
'--enable-gnutls --enable-iconv --enable-libass --enable-libbluray '
'--enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme '
'--enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame '
'--enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 '
'--enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy '
'--enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame '
'--enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis '
'--enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 '
'--enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg '
'--enable-lzma --enable-decklink --enable-zlib',
'--enable-gpl --enable-version3 --enable-dxva2 --enable-libmfx --enable-nvenc '
'--enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r '
'--enable-gnutls --enable-iconv --enable-libass --enable-libbluray '
'--enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme '
'--enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame '
'--enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 '
'--enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy '
'--enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame '
'--enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis '
'--enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 '
'--enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg '
'--enable-lzma --enable-decklink --enable-zlib',
'libavfilter license: GPL version 3 or later',
'--enable-gpl --enable-version3 --enable-dxva2 --enable-libmfx --enable-nvenc '
'--enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r '
'--enable-gnutls --enable-iconv --enable-libass --enable-libbluray '
'--enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme '
'--enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame '
'--enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 '
'--enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy '
'--enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame '
'--enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis '
'--enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 '
'--enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg '
'--enable-lzma --enable-decklink --enable-zlib',
'libavformat license: GPL version 3 or later',
'--enable-gpl --enable-version3 --enable-dxva2 --enable-libmfx --enable-nvenc '
'--enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r '
'--enable-gnutls --enable-iconv --enable-libass --enable-libbluray '
'--enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme '
'--enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame '
'--enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 '
'--enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy '
'--enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame '
'--enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis '
'--enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 '
'--enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg '
'--enable-lzma --enable-decklink --enable-zlib',
'libavcodec license: GPL version 3 or later',
'--enable-gpl --enable-version3 --enable-dxva2 --enable-libmfx --enable-nvenc '
'--enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r '
'--enable-gnutls --enable-iconv --enable-libass --enable-libbluray '
'--enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme '
'--enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame '
'--enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 '
'--enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy '
'--enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame '
'--enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis '
'--enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 '
'--enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg '
'--enable-lzma --enable-decklink --enable-zlib',
'libpostproc license: GPL version 3 or later',
'--enable-gpl --enable-version3 --enable-dxva2 --enable-libmfx --enable-nvenc '
'--enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r '
'--enable-gnutls --enable-iconv --enable-libass --enable-libbluray '
'--enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme '
'--enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame '
'--enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 '
'--enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy '
'--enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame '
'--enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis '
'--enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 '
'--enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg '
'--enable-lzma --enable-decklink --enable-zlib',
'libswresample license: GPL version 3 or later',
'--enable-gpl --enable-version3 --enable-dxva2 --enable-libmfx --enable-nvenc '
'--enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r '
'--enable-gnutls --enable-iconv --enable-libass --enable-libbluray '
'--enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme '
'--enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame '
'--enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 '
'--enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy '
'--enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame '
'--enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis '
'--enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 '
'--enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg '
'--enable-lzma --enable-decklink --enable-zlib',
'libswscale license: GPL version 3 or later',
'--enable-gpl --enable-version3 --enable-dxva2 --enable-libmfx --enable-nvenc '
'--enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r '
'--enable-gnutls --enable-iconv --enable-libass --enable-libbluray '
'--enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme '
'--enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame '
'--enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 '
'--enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy '
'--enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame '
'--enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis '
'--enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 '
'--enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg '
'--enable-lzma --enable-decklink --enable-zlib',
'libavutil license: GPL version 3 or later',
'This software is derived from the GNU GPL XviD codec (1.3.0).',
]
self.check_matched_texts(
test_loc='matched_text/ffmpeg/ffmpeg.exe',
expected_texts=expected_texts,
whole_lines=True
)
def test_matched_text_is_collected_correctly_in_binary_ffmpeg_windows_not_whole_lines(self):
expected_texts = [
'enable-gpl --enable-version3 --',
'enable-gpl --enable-version3 --',
'is free software; you can redistribute it and/or modify\n'
'it under the terms of the GNU General Public License as published by\n'
'the Free Software Foundation; either version 3 of the License, or\n'
'(at your option) any later version.\n'
'%s is distributed in the hope that it will be useful,\n'
'but WITHOUT ANY WARRANTY; without even the implied warranty of\n'
'MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n'
'GNU General Public License for more details.\n'
'You should have received a copy of the GNU General Public License\n'
'along with %s. If not, see <http://www.gnu.org/licenses/>.\n'
'File formats:\n'
'D. = Demuxing supported\n'
'.E = Muxing supported\n'
'%s%s %-15s %s\n'
'Devices:\n'
'Codecs:\n'
'D..... = Decoding supported\n'
'.E.... = Encoding supported\n'
'..V... = Video codec\n'
"No option name near '%s'\n"
"Unable to parse '%s': %s\n"
"Setting '%s' to value '%s'\n"
"Option '%s' not found\n"
'--enable-gpl --',
'enable-gpl --enable-version3 --',
'license: GPL version 3 or later',
'enable-gpl --enable-version3 --',
'license: GPL version 3 or later',
'enable-gpl --enable-version3 --',
'license: GPL version 3 or later',
'enable-gpl --enable-version3 --',
'license: GPL version 3 or later',
'enable-gpl --enable-version3 --',
'license: GPL version 3 or later',
'enable-gpl --enable-version3 --',
'license: GPL version 3 or later',
'enable-gpl --enable-version3 --',
'license: GPL version 3 or later',
'This software is derived from the GNU GPL XviD codec ('
]
self.check_matched_texts(
test_loc='matched_text/ffmpeg/ffmpeg.exe',
expected_texts=expected_texts,
whole_lines=False,
)
def test_matched_text_is_collected_correctly_in_binary_ffmpeg_elf_whole_lines(self):
expected_texts = [
'--prefix=/usr --extra-version=0ubuntu0.1 --build-suffix=-ffmpeg '
'--toolchain=hardened --libdir=/usr/lib/x86_64-linux-gnu '
'--incdir=/usr/include/x86_64-linux-gnu --cc=cc --cxx=g++ --enable-gpl '
'--enable-shared --disable-stripping --disable-decoder=libopenjpeg '
'--disable-decoder=libschroedinger --enable-avresample --enable-avisynth '
'--enable-gnutls --enable-ladspa --enable-libass --enable-libbluray '
'--enable-libbs2b --enable-libcaca --enable-libcdio --enable-libflite '
'--enable-libfontconfig --enable-libfreetype --enable-libfribidi '
'--enable-libgme --enable-libgsm --enable-libmodplug --enable-libmp3lame '
'--enable-libopenjpeg --enable-libopus --enable-libpulse --enable-librtmp '
'--enable-libschroedinger --enable-libshine --enable-libsnappy '
'--enable-libsoxr --enable-libspeex --enable-libssh --enable-libtheora '
'--enable-libtwolame --enable-libvorbis --enable-libvpx --enable-libwavpack '
'--enable-libwebp --enable-libx265 --enable-libxvid --enable-libzvbi '
'--enable-openal --enable-opengl --enable-x11grab --enable-libdc1394 '
'--enable-libiec61883 --enable-libzmq --enable-frei0r --enable-libx264 '
'--enable-libopencv',
'%sconfiguration: --prefix=/usr --extra-version=0ubuntu0.1 '
'--build-suffix=-ffmpeg --toolchain=hardened '
'--libdir=/usr/lib/x86_64-linux-gnu --incdir=/usr/include/x86_64-linux-gnu '
'--cc=cc --cxx=g++ --enable-gpl --enable-shared --disable-stripping '
'--disable-decoder=libopenjpeg --disable-decoder=libschroedinger '
'--enable-avresample --enable-avisynth --enable-gnutls --enable-ladspa '
'--enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca '
'--enable-libcdio --enable-libflite --enable-libfontconfig '
'--enable-libfreetype --enable-libfribidi --enable-libgme --enable-libgsm '
'--enable-libmodplug --enable-libmp3lame --enable-libopenjpeg '
'--enable-libopus --enable-libpulse --enable-librtmp --enable-libschroedinger '
'--enable-libshine --enable-libsnappy --enable-libsoxr --enable-libspeex '
'--enable-libssh --enable-libtheora --enable-libtwolame --enable-libvorbis '
'--enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx265 '
'--enable-libxvid --enable-libzvbi --enable-openal --enable-opengl '
'--enable-x11grab --enable-libdc1394 --enable-libiec61883 --enable-libzmq '
'--enable-frei0r --enable-libx264 --enable-libopencv',
'%s is free software; you can redistribute it and/or modify\n'
'it under the terms of the GNU General Public License as published by\n'
'the Free Software Foundation; either version 2 of the License, or\n'
'(at your option) any later version.\n'
'%s is distributed in the hope that it will be useful,\n'
'but WITHOUT ANY WARRANTY; without even the implied warranty of\n'
'MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n'
'GNU General Public License for more details.\n'
'You should have received a copy of the GNU General Public License\n'
'along with %s; if not, write to the Free Software\n'
'Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA'
]
self.check_matched_texts(
test_loc='matched_text/ffmpeg/ffmpeg',
expected_texts=expected_texts,
whole_lines=True,
)
def test_matched_text_is_collected_correctly_in_binary_ffmpeg_static_whole_lines(self):
expected_texts = ['libswresample license: LGPL version 2.1 or later']
self.check_matched_texts(
test_loc='matched_text/ffmpeg/libavsample.lib',
expected_texts=expected_texts,
whole_lines=True,
)
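All three methods above delegate to a check_matched_texts helper defined earlier in the suite and not shown in this excerpt. A minimal sketch of what such a helper could look like, assuming a get_test_loc test-data resolver and a find_license_matches scanning entry point (both stand-ins, not the suite's real API):

def check_matched_texts(self, test_loc, expected_texts, whole_lines=True):
    # Resolve the test file, scan it, and compare matched texts verbatim.
    # `get_test_loc` and `find_license_matches` are assumed stand-ins for
    # the real test-data resolver and license-matching entry point.
    test_file = self.get_test_loc(test_loc)
    matches = find_license_matches(test_file)
    results = [m.matched_text(whole_lines=whole_lines) for m in matches]
    assert results == expected_texts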
| 49.720729 | 567 | 0.639049 | 14,040 | 103,618 | 4.533903 | 0.048789 | 0.02757 | 0.033932 | 0.039745 | 0.888337 | 0.864993 | 0.847666 | 0.825594 | 0.799532 | 0.779769 | 0 | 0.043823 | 0.23494 | 103,618 | 2,083 | 568 | 49.744599 | 0.759064 | 0.02792 | 0 | 0.625749 | 0 | 0.005389 | 0.270354 | 0.025129 | 0 | 0 | 0 | 0 | 0.123353 | 1 | 0.05988 | false | 0.001198 | 0.013174 | 0 | 0.079042 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6
| 5389c0437cbdc35cc7d90059103ad85e108aac9d | 39,492 | py | Python | tests/test_asgi.py | HyperGH/Yuyo | c8ce6ca8e8ba5a7ad18b0f2f74d4be6be239ade4 | ["BSD-3-Clause"] | null | null | null | tests/test_asgi.py | HyperGH/Yuyo | c8ce6ca8e8ba5a7ad18b0f2f74d4be6be239ade4 | ["BSD-3-Clause"] | null | null | null | tests/test_asgi.py | HyperGH/Yuyo | c8ce6ca8e8ba5a7ad18b0f2f74d4be6be239ade4 | ["BSD-3-Clause"] | null | null | null |
# -*- coding: utf-8 -*-
# cython: language_level=3
# BSD 3-Clause License
#
# Copyright (c) 2020-2022, Faster Speeding
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import asyncio
import contextlib
import traceback
from unittest import mock
import asgiref.typing
import hikari
import pytest
import yuyo
class TestAsgiAdapter:
@pytest.fixture()
def stub_server(self) -> hikari.api.InteractionServer:
return mock.AsyncMock()
@pytest.fixture()
def adapter(self, stub_server: hikari.api.InteractionServer) -> yuyo.AsgiAdapter:
return yuyo.AsgiAdapter(stub_server)
def test_server_property(self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer) -> None:
assert adapter.server is stub_server
@pytest.fixture()
def http_scope(self) -> asgiref.typing.HTTPScope:
return asgiref.typing.HTTPScope(
type="http",
asgi=asgiref.typing.ASGIVersions(spec_version="ok", version="3.0"),
http_version="1.1",
method="POST",
scheme="",
path="/",
raw_path=b"",
headers=[],
client=("", 1),
server=("", 1),
extensions=None,
query_string=b"",
root_path="",
)
@pytest.mark.asyncio()
async def test___call___when_http(
self, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
) -> None:
mock_process_request = mock.AsyncMock()
mock_receive = mock.Mock()
mock_send = mock.Mock()
class StubAdapter(yuyo.AsgiAdapter):
process_request = mock_process_request
stub_adapter = StubAdapter(stub_server)
await stub_adapter(http_scope, mock_receive, mock_send)
mock_process_request.assert_awaited_once_with(http_scope, mock_receive, mock_send)
@pytest.mark.asyncio()
async def test___call___when_lifespan(self, stub_server: hikari.api.InteractionServer):
mock_process_lifespan_event = mock.AsyncMock()
mock_receive = mock.Mock()
mock_send = mock.Mock()
mock_scope = asgiref.typing.LifespanScope(
type="lifespan", asgi=asgiref.typing.ASGIVersions(spec_version="ok", version="3.0")
)
class StubAdapter(yuyo.AsgiAdapter):
process_lifespan_event = mock_process_lifespan_event
stub_adapter = StubAdapter(stub_server)
await stub_adapter(mock_scope, mock_receive, mock_send)
mock_process_lifespan_event.assert_awaited_once_with(mock_receive, mock_send)
@pytest.mark.asyncio()
async def test___call___when_webhook(self, adapter: yuyo.AsgiAdapter):
with pytest.raises(NotImplementedError, match="Websocket operations are not supported"):
await adapter(
asgiref.typing.WebSocketScope(
type="websocket",
asgi=asgiref.typing.ASGIVersions(spec_version="ok", version="3.0"),
http_version="...",
scheme="...",
path="/",
raw_path=b"",
query_string=b"",
root_path="",
headers=[],
client=("2", 2),
server=None,
subprotocols=[],
extensions={},
),
mock.AsyncMock(),
mock.AsyncMock(),
)
@pytest.mark.asyncio()
async def test_process_lifespan_event_on_startup(self, adapter: yuyo.AsgiAdapter) -> None:
mock_receive = mock.AsyncMock(return_value={"type": "lifespan.startup"})
mock_send = mock.AsyncMock()
await adapter.process_lifespan_event(mock_receive, mock_send)
mock_receive.assert_awaited_once_with()
mock_send.assert_awaited_once_with({"type": "lifespan.startup.complete"})
@pytest.mark.asyncio()
async def test_process_lifespan_event_on_startup_with_callbacks(self, adapter: yuyo.AsgiAdapter) -> None:
mock_receive = mock.AsyncMock(return_value={"type": "lifespan.startup"})
mock_send = mock.AsyncMock()
mock_async_callback = mock.AsyncMock()
mock_callback = mock.Mock()
adapter.add_startup_callback(mock_async_callback).add_startup_callback(mock_callback)
await adapter.process_lifespan_event(mock_receive, mock_send)
mock_receive.assert_awaited_once_with()
mock_async_callback.assert_awaited_once_with()
mock_callback.assert_called_once_with()
mock_send.assert_awaited_once_with({"type": "lifespan.startup.complete"})
@pytest.mark.asyncio()
async def test_process_lifespan_event_on_startup_when_sync_callback_fails(self, adapter: yuyo.AsgiAdapter) -> None:
mock_receive = mock.AsyncMock(return_value={"type": "lifespan.startup"})
mock_send = mock.AsyncMock()
mock_async_callback = mock.AsyncMock(side_effect=Exception("test"))
mock_callback = mock.Mock()
adapter.add_startup_callback(mock_async_callback).add_startup_callback(mock_callback)
with mock.patch.object(traceback, "format_exc") as format_exc:
await adapter.process_lifespan_event(mock_receive, mock_send)
mock_receive.assert_awaited_once_with()
mock_async_callback.assert_awaited_once_with()
mock_callback.assert_called_once_with()
mock_send.assert_awaited_once_with({"type": "lifespan.startup.failed", "message": format_exc.return_value})
format_exc.assert_called_once_with()
@pytest.mark.asyncio()
async def test_process_lifespan_event_on_startup_when_async_callback_fails(self, adapter: yuyo.AsgiAdapter) -> None:
mock_receive = mock.AsyncMock(return_value={"type": "lifespan.startup"})
mock_send = mock.AsyncMock()
mock_async_callback = mock.AsyncMock()
mock_callback = mock.Mock(side_effect=Exception("test"))
adapter.add_startup_callback(mock_async_callback).add_startup_callback(mock_callback)
with mock.patch.object(traceback, "format_exc") as format_exc:
await adapter.process_lifespan_event(mock_receive, mock_send)
mock_receive.assert_awaited_once_with()
mock_async_callback.assert_awaited_once_with()
mock_callback.assert_called_once_with()
mock_send.assert_awaited_once_with({"type": "lifespan.startup.failed", "message": format_exc.return_value})
format_exc.assert_called_once_with()
@pytest.mark.asyncio()
async def test_process_lifespan_event_on_shutdown(self, adapter: yuyo.AsgiAdapter) -> None:
mock_receive = mock.AsyncMock(return_value={"type": "lifespan.shutdown"})
mock_send = mock.AsyncMock()
await adapter.process_lifespan_event(mock_receive, mock_send)
mock_receive.assert_awaited_once_with()
mock_send.assert_awaited_once_with({"type": "lifespan.shutdown.complete"})
@pytest.mark.asyncio()
async def test_process_lifespan_event_on_shutdown_with_callbacks(self, adapter: yuyo.AsgiAdapter) -> None:
mock_receive = mock.AsyncMock(return_value={"type": "lifespan.shutdown"})
mock_send = mock.AsyncMock()
mock_async_callback = mock.AsyncMock()
mock_callback = mock.Mock()
adapter.add_shutdown_callback(mock_async_callback).add_shutdown_callback(mock_callback)
await adapter.process_lifespan_event(mock_receive, mock_send)
mock_receive.assert_awaited_once_with()
mock_async_callback.assert_awaited_once_with()
mock_callback.assert_called_once_with()
mock_send.assert_awaited_once_with({"type": "lifespan.shutdown.complete"})
@pytest.mark.asyncio()
async def test_process_lifespan_event_on_shutdown_when_sync_callback_fails(self, adapter: yuyo.AsgiAdapter) -> None:
mock_receive = mock.AsyncMock(return_value={"type": "lifespan.shutdown"})
mock_send = mock.AsyncMock()
mock_async_callback = mock.AsyncMock(side_effect=Exception("test"))
mock_callback = mock.Mock()
adapter.add_shutdown_callback(mock_async_callback).add_shutdown_callback(mock_callback)
with mock.patch.object(traceback, "format_exc") as format_exc:
await adapter.process_lifespan_event(mock_receive, mock_send)
mock_receive.assert_awaited_once_with()
mock_async_callback.assert_awaited_once_with()
mock_callback.assert_called_once_with()
mock_send.assert_awaited_once_with({"type": "lifespan.shutdown.failed", "message": format_exc.return_value})
format_exc.assert_called_once_with()
@pytest.mark.asyncio()
async def test_process_lifespan_event_on_shutdown_when_async_callback_fails(
self, adapter: yuyo.AsgiAdapter
) -> None:
mock_receive = mock.AsyncMock(return_value={"type": "lifespan.shutdown"})
mock_send = mock.AsyncMock()
mock_async_callback = mock.AsyncMock()
mock_callback = mock.Mock(side_effect=Exception("test"))
adapter.add_shutdown_callback(mock_async_callback).add_shutdown_callback(mock_callback)
with mock.patch.object(traceback, "format_exc") as format_exc:
await adapter.process_lifespan_event(mock_receive, mock_send)
mock_receive.assert_awaited_once_with()
mock_async_callback.assert_awaited_once_with()
mock_callback.assert_called_once_with()
mock_send.assert_awaited_once_with({"type": "lifespan.shutdown.failed", "message": format_exc.return_value})
format_exc.assert_called_once_with()
@pytest.mark.asyncio()
async def test_process_lifespan_event_on_invalid_lifespan_type(self, adapter: yuyo.AsgiAdapter) -> None:
mock_receive = mock.AsyncMock(return_value={"type": "lifespan.idk"})
mock_send = mock.AsyncMock()
with pytest.raises(RuntimeError, match="Unknown lifespan event lifespan.idk"):
await adapter.process_lifespan_event(mock_receive, mock_send)
mock_receive.assert_awaited_once_with()
mock_send.assert_not_called()
@pytest.mark.asyncio()
async def test_process_request(
self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
):
http_scope["headers"] = [
(b"Content-Type", b"application/json"),
(b"x-signature-timestamp", b"321123"),
(b"random-header2", b"random value"),
(b"x-signature-ed25519", b"6e796161"),
(b"random-header", b"random value"),
]
mock_receive = mock.AsyncMock(
side_effect=[{"body": b"cat", "more_body": True}, {"body": b"girls", "more_body": False}]
)
mock_send = mock.AsyncMock()
stub_server.on_interaction.return_value.headers = {
"Content-Type": "jazz hands",
"kill": "me baby",
"I am the milk man": "my milk is delicious",
"and the sea shall run white": "with his rage",
}
await adapter.process_request(http_scope, mock_receive, mock_send)
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": stub_server.on_interaction.return_value.status_code,
"headers": [
(b"Content-Type", b"jazz hands"),
(b"kill", b"me baby"),
(b"I am the milk man", b"my milk is delicious"),
(b"and the sea shall run white", b"with his rage"),
],
}
),
mock.call(
{
"type": "http.response.body",
"body": stub_server.on_interaction.return_value.payload,
"more_body": False,
}
),
]
)
mock_receive.assert_has_awaits([mock.call(), mock.call()])
stub_server.on_interaction.assert_awaited_once_with(bytearray(b"catgirls"), b"nyaa", b"321123")
@pytest.mark.asyncio()
async def test_process_request_when_not_post(
self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
):
http_scope["method"] = "GET"
http_scope["path"] = "/"
mock_receive = mock.AsyncMock()
mock_send = mock.AsyncMock()
await adapter.process_request(http_scope, mock_receive, mock_send)
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": 404,
"headers": [(b"content-type", b"text/plain; charset=UTF-8")],
}
),
mock.call({"type": "http.response.body", "body": b"Not found", "more_body": False}),
]
)
mock_receive.assert_not_called()
stub_server.on_interaction.assert_not_called()
@pytest.mark.asyncio()
async def test_process_request_when_not_base_route(
self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
):
http_scope["method"] = "POST"
http_scope["path"] = "/not-base-route"
mock_receive = mock.AsyncMock()
mock_send = mock.AsyncMock()
await adapter.process_request(http_scope, mock_receive, mock_send)
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": 404,
"headers": [(b"content-type", b"text/plain; charset=UTF-8")],
}
),
mock.call({"type": "http.response.body", "body": b"Not found", "more_body": False}),
]
)
mock_receive.assert_not_called()
stub_server.on_interaction.assert_not_called()
@pytest.mark.asyncio()
async def test_process_request_when_no_body(
self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
):
mock_receive = mock.AsyncMock(return_value={"body": b"", "more_body": False})
mock_send = mock.AsyncMock()
await adapter.process_request(http_scope, mock_receive, mock_send)
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": 400,
"headers": [(b"content-type", b"text/plain; charset=UTF-8")],
}
),
mock.call({"type": "http.response.body", "body": b"POST request must have a body", "more_body": False}),
]
)
mock_receive.assert_awaited_once_with()
stub_server.on_interaction.assert_not_called()
@pytest.mark.asyncio()
async def test_process_request_when_no_body_and_receive_empty(
self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
):
mock_receive = mock.AsyncMock(return_value={})
mock_send = mock.AsyncMock()
await adapter.process_request(http_scope, mock_receive, mock_send)
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": 400,
"headers": [(b"content-type", b"text/plain; charset=UTF-8")],
}
),
mock.call({"type": "http.response.body", "body": b"POST request must have a body", "more_body": False}),
]
)
mock_receive.assert_awaited_once_with()
stub_server.on_interaction.assert_not_called()
@pytest.mark.asyncio()
async def test_process_request_when_no_content_type(
self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
):
http_scope["headers"] = []
mock_receive = mock.AsyncMock(return_value={"body": b"gay", "more_body": False})
mock_send = mock.AsyncMock()
await adapter.process_request(http_scope, mock_receive, mock_send)
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": 400,
"headers": [(b"content-type", b"text/plain; charset=UTF-8")],
}
),
mock.call(
{"type": "http.response.body", "body": b"Content-Type must be application/json", "more_body": False}
),
]
)
mock_receive.assert_awaited_once_with()
stub_server.on_interaction.assert_not_called()
@pytest.mark.asyncio()
async def test_process_request_when_not_json_content_type(
self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
):
http_scope["headers"] = [(b"Content-Type", b"NOT JSON")]
mock_receive = mock.AsyncMock(return_value={"body": b"gay", "more_body": False})
mock_send = mock.AsyncMock()
await adapter.process_request(http_scope, mock_receive, mock_send)
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": 400,
"headers": [(b"content-type", b"text/plain; charset=UTF-8")],
}
),
mock.call(
{"type": "http.response.body", "body": b"Content-Type must be application/json", "more_body": False}
),
]
)
mock_receive.assert_awaited_once_with()
stub_server.on_interaction.assert_not_called()
@pytest.mark.asyncio()
async def test_process_request_when_missing_timestamp_header(
self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
):
http_scope["headers"] = [(b"Content-Type", b"application/json"), (b"x-signature-ed25519", b"676179")]
mock_receive = mock.AsyncMock(return_value={"body": b"gay", "more_body": False})
mock_send = mock.AsyncMock()
await adapter.process_request(http_scope, mock_receive, mock_send)
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": 400,
"headers": [(b"content-type", b"text/plain; charset=UTF-8")],
}
),
mock.call(
{
"type": "http.response.body",
"body": b"Missing required request signature header(s)",
"more_body": False,
}
),
]
)
mock_receive.assert_awaited_once_with()
stub_server.on_interaction.assert_not_called()
@pytest.mark.asyncio()
async def test_process_request_when_missing_ed25519_header(
self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
):
http_scope["headers"] = [(b"Content-Type", b"application/json"), (b"x-signature-timestamp", b"87")]
mock_receive = mock.AsyncMock(return_value={"body": b"gay", "more_body": False})
mock_send = mock.AsyncMock()
await adapter.process_request(http_scope, mock_receive, mock_send)
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": 400,
"headers": [(b"content-type", b"text/plain; charset=UTF-8")],
}
),
mock.call(
{
"type": "http.response.body",
"body": b"Missing required request signature header(s)",
"more_body": False,
}
),
]
)
mock_receive.assert_awaited_once_with()
stub_server.on_interaction.assert_not_called()
@pytest.mark.parametrize("header_value", ["🇯🇵".encode(), b"trans"])
@pytest.mark.asyncio()
async def test_process_request_when_ed_25519_header_not_valid(
self,
adapter: yuyo.AsgiAdapter,
stub_server: hikari.api.InteractionServer,
http_scope: asgiref.typing.HTTPScope,
header_value: bytes,
):
http_scope["headers"] = [
(b"Content-Type", b"application/json"),
(b"x-signature-timestamp", b"87"),
(b"x-signature-ed25519", header_value),
]
mock_receive = mock.AsyncMock(return_value={"body": b"gay", "more_body": False})
mock_send = mock.AsyncMock()
await adapter.process_request(http_scope, mock_receive, mock_send)
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": 400,
"headers": [(b"content-type", b"text/plain; charset=UTF-8")],
}
),
mock.call(
{
"type": "http.response.body",
"body": b"Invalid ED25519 signature header found",
"more_body": False,
}
),
]
)
mock_receive.assert_awaited_once_with()
stub_server.on_interaction.assert_not_called()
@pytest.mark.asyncio()
async def test_process_request_when_on_interaction_raises(
self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
):
http_scope["headers"] = [
(b"x-signature-timestamp", b"653245"),
(b"random-header2", b"random value"),
(b"x-signature-ed25519", b"7472616e73"),
(b"random-header", b"random value"),
(b"Content-Type", b"application/json"),
]
mock_receive = mock.AsyncMock(return_value={"body": b"transive", "more_body": False})
mock_send = mock.AsyncMock()
stub_error = Exception("💩")
stub_server.on_interaction.side_effect = stub_error
with pytest.raises(Exception, match=".*") as exc_info:
await adapter.process_request(http_scope, mock_receive, mock_send)
assert exc_info.value is stub_error
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": 500,
"headers": [(b"content-type", b"text/plain; charset=UTF-8")],
}
),
mock.call(
{
"type": "http.response.body",
"body": b"Internal Server Error",
"more_body": False,
}
),
]
)
mock_receive.assert_awaited_once_with()
stub_server.on_interaction.assert_awaited_once_with(b"transive", b"trans", b"653245")
@pytest.mark.asyncio()
async def test_process_request_when_no_response_headers_or_body(
self, adapter: yuyo.AsgiAdapter, stub_server: hikari.api.InteractionServer, http_scope: asgiref.typing.HTTPScope
):
http_scope["headers"] = [
(b"Content-Type", b"application/json"),
(b"random-header2", b"random value"),
(b"x-signature-ed25519", b"6e796161"),
(b"x-signature-timestamp", b"321123"),
(b"random-header", b"random value"),
]
mock_receive = mock.AsyncMock(
side_effect=[{"body": b"cat", "more_body": True}, {"body": b"girls", "more_body": False}]
)
mock_send = mock.AsyncMock()
stub_server.on_interaction.return_value.payload = None
stub_server.on_interaction.return_value.headers = None
await adapter.process_request(http_scope, mock_receive, mock_send)
mock_send.assert_has_awaits(
[
mock.call(
{
"type": "http.response.start",
"status": stub_server.on_interaction.return_value.status_code,
"headers": [],
}
),
mock.call(
{
"type": "http.response.body",
"body": b"",
"more_body": False,
}
),
]
)
mock_receive.assert_has_awaits([mock.call(), mock.call()])
stub_server.on_interaction.assert_awaited_once_with(bytearray(b"catgirls"), b"nyaa", b"321123")
class TestAsgiBot:
def test___init___when_asgi_managed(self) -> None:
mock_add_startup_callback = mock.Mock()
mock_add_shutdown_callback = mock.Mock()
class StubBot(yuyo.AsgiBot):
add_startup_callback = mock_add_startup_callback
add_shutdown_callback = mock_add_shutdown_callback
with mock.patch.object(hikari.impl, "EntityFactoryImpl") as mock_entity_factory_impl:
bot = StubBot("token", "Bot")
assert bot.entity_factory is mock_entity_factory_impl.return_value
mock_entity_factory_impl.assert_called_once_with(bot)
mock_add_startup_callback.assert_called_once_with(bot._start)
mock_add_shutdown_callback.assert_called_once_with(bot._close)
def test___init___when_not_asgi_managed(self) -> None:
mock_add_startup_callback = mock.Mock()
mock_add_shutdown_callback = mock.Mock()
class StubBot(yuyo.AsgiBot):
add_startup_callback = mock_add_startup_callback
add_shutdown_callback = mock_add_shutdown_callback
with mock.patch.object(hikari.impl, "EntityFactoryImpl") as mock_entity_factory_impl:
bot = StubBot("token", "Bot", asgi_managed=False)
assert bot.entity_factory is mock_entity_factory_impl.return_value
mock_entity_factory_impl.assert_called_once_with(bot)
mock_add_startup_callback.assert_not_called()
mock_add_shutdown_callback.assert_not_called()
def test_entity_factory_property(self):
with mock.patch.object(hikari.impl, "EntityFactoryImpl") as mock_entity_factory_impl:
bot = yuyo.AsgiBot("token", "Bot")
assert bot.entity_factory is mock_entity_factory_impl.return_value
mock_entity_factory_impl.assert_called_once_with(bot)
def test_executor_property(self):
mock_executor = mock.Mock()
with mock.patch.object(hikari.impl, "RESTClientImpl") as mock_rest_client_impl:
bot = yuyo.AsgiBot("token", "Bot", executor=mock_executor)
mock_rest_client_impl.assert_called_once_with( # noqa: S106
cache=None,
entity_factory=bot.entity_factory,
executor=mock_executor,
http_settings=bot.http_settings,
max_rate_limit=300.0,
proxy_settings=bot.proxy_settings,
rest_url=None,
token="token",
token_type="Bot",
max_retries=3,
)
assert bot.executor is mock_executor
def test_executor_property_when_no_executor(self):
bot = yuyo.AsgiBot("token", "Bot")
assert bot.executor is None
def test_http_settings_property(self):
with mock.patch.object(hikari, "HTTPSettings") as mock_http_settings:
bot = yuyo.AsgiBot("token", "Bot")
assert bot.http_settings is mock_http_settings.return_value
mock_http_settings.assert_called_once_with()
def test_http_settings_property_when_passed_through(self):
mock_settings = mock.Mock()
with mock.patch.object(hikari.impl, "RESTClientImpl") as mock_rest_client_impl:
bot = yuyo.AsgiBot("token", "Bot", http_settings=mock_settings)
mock_rest_client_impl.assert_called_once_with( # noqa: S106
cache=None,
entity_factory=bot.entity_factory,
executor=None,
http_settings=mock_settings,
max_rate_limit=300.0,
proxy_settings=bot.proxy_settings,
rest_url=None,
token="token",
token_type="Bot",
max_retries=3,
)
assert bot.http_settings is mock_settings
def test_interaction_server_property(self):
with mock.patch.object(hikari.impl, "InteractionServer") as mock_interaction_server:
bot = yuyo.AsgiBot("token", "Bot", public_key=b"osososo")
assert bot.interaction_server is mock_interaction_server.return_value
mock_interaction_server.assert_called_once_with(
entity_factory=bot.entity_factory, rest_client=bot.rest, public_key=b"osososo"
)
def test_proxy_settings_property(self):
with mock.patch.object(hikari, "ProxySettings") as mock_proxy_settings:
bot = yuyo.AsgiBot("token", "Bot")
assert bot.proxy_settings is mock_proxy_settings.return_value
mock_proxy_settings.assert_called_once_with()
def test_proxy_settings_property_when_passed_through(self):
mock_settings = mock.Mock()
with mock.patch.object(hikari.impl, "RESTClientImpl") as mock_rest_client_impl:
bot = yuyo.AsgiBot("token", "Bot", proxy_settings=mock_settings)
mock_rest_client_impl.assert_called_once_with( # noqa: S106
cache=None,
entity_factory=bot.entity_factory,
executor=None,
http_settings=bot.http_settings,
max_rate_limit=300.0,
proxy_settings=mock_settings,
rest_url=None,
token="token",
token_type="Bot",
max_retries=3,
)
assert bot.proxy_settings is mock_settings
def test_rest_property(self):
with mock.patch.object(hikari.impl, "RESTClientImpl") as mock_rest_client_impl:
bot = yuyo.AsgiBot("token", "Bot")
mock_rest_client_impl.assert_called_once_with( # noqa: S106
cache=None,
entity_factory=bot.entity_factory,
executor=None,
http_settings=bot.http_settings,
max_rate_limit=300.0,
proxy_settings=bot.proxy_settings,
rest_url=None,
token="token",
token_type="Bot",
max_retries=3,
)
assert bot.rest is mock_rest_client_impl.return_value
def test_run(self):
stack = contextlib.ExitStack()
mock_get_running_loop = stack.enter_context(mock.patch.object(asyncio, "get_running_loop"))
mock_make_event_loop = stack.enter_context(mock.patch.object(asyncio, "new_event_loop"))
mock_set_event_loop = stack.enter_context(mock.patch.object(asyncio, "set_event_loop"))
mock_loop = mock_get_running_loop.return_value
mock_start = mock.Mock()
mock_join = mock.Mock()
class StubBot(yuyo.AsgiBot):
start = mock_start
join = mock_join
bot = StubBot("token", "Bot", asgi_managed=False)
bot.run()
mock_get_running_loop.assert_called_once_with()
mock_make_event_loop.assert_not_called()
mock_set_event_loop.assert_not_called()
mock_start.assert_called_once_with()
mock_join.assert_called_once_with()
mock_loop.run_until_complete.assert_has_calls(
[mock.call(mock_start.return_value), mock.call(mock_join.return_value)]
)
def test_run_makes_new_event_loop(self):
stack = contextlib.ExitStack()
mock_get_running_loop = stack.enter_context(
mock.patch.object(asyncio, "get_running_loop", side_effect=RuntimeError)
)
mock_make_event_loop = stack.enter_context(mock.patch.object(asyncio, "new_event_loop"))
mock_set_event_loop = stack.enter_context(mock.patch.object(asyncio, "set_event_loop"))
mock_loop = mock_make_event_loop.return_value
mock_start = mock.Mock()
mock_join = mock.Mock()
class StubBot(yuyo.AsgiBot):
start = mock_start
join = mock_join
bot = StubBot("token", "Bot", asgi_managed=False)
bot.run()
mock_get_running_loop.assert_called_once_with()
mock_make_event_loop.assert_called_once_with()
mock_set_event_loop.assert_called_once_with(mock_loop)
mock_start.assert_called_once_with()
mock_join.assert_called_once_with()
mock_loop.run_until_complete.assert_has_calls(
[mock.call(mock_start.return_value), mock.call(mock_join.return_value)]
)
@pytest.mark.asyncio()
async def test_run_when_already_alive(self):
mock_join = mock.Mock()
class StubBot(yuyo.AsgiBot):
join = mock_join
with mock.patch.object(hikari.impl, "RESTClientImpl"):
bot = StubBot("token", "Bot", asgi_managed=False)
await bot.start()
with pytest.raises(RuntimeError, match="The client is already running"):
bot.run()
mock_join.assert_not_called()
def test_run_when_asgi_managed(self):
mock_start = mock.Mock()
mock_join = mock.Mock()
class StubBot(yuyo.AsgiBot):
start = mock_start
join = mock_join
bot = StubBot("token", "Bot")
with pytest.raises(RuntimeError, match="The client is being managed by ASGI lifespan events"):
bot.run()
mock_start.assert_not_called()
mock_join.assert_not_called()
@pytest.mark.asyncio()
async def test_start(self):
stack = contextlib.ExitStack()
mock_rest_client_impl = stack.enter_context(mock.patch.object(hikari.impl, "RESTClientImpl"))
mock_event = stack.enter_context(mock.patch.object(asyncio, "Event"))
with stack:
bot = yuyo.AsgiBot("token", "Bot", asgi_managed=False)
await bot.start()
assert bot.is_alive is True
assert bot._join_event is mock_event.return_value
mock_rest_client_impl.return_value.start.assert_called_once_with()
mock_event.assert_called_once_with()
@pytest.mark.asyncio()
async def test_start_when_asgi_managed(self):
with mock.patch.object(hikari.impl, "RESTClientImpl"):
bot = yuyo.AsgiBot("token", "Bot")
with pytest.raises(RuntimeError, match="The client is being managed by ASGI lifespan events"):
await bot.start()
@pytest.mark.asyncio()
async def test_start_when_already_alive(self):
with mock.patch.object(hikari.impl, "RESTClientImpl"):
bot = yuyo.AsgiBot("token", "Bot", asgi_managed=False)
await bot.start()
with pytest.raises(RuntimeError, match="The client is already running"):
await bot.start()
@pytest.mark.asyncio()
async def test_close_when_asgi_managed(self):
bot = yuyo.AsgiBot("token", "Bot")
with pytest.raises(RuntimeError, match="The client is being managed by ASGI lifespan events"):
await bot.close()
@pytest.mark.asyncio()
async def test_close(self):
stack = contextlib.ExitStack()
mock_rest_client_impl = stack.enter_context(mock.patch.object(hikari.impl, "RESTClientImpl"))
mock_rest_client_impl.return_value.close = mock.AsyncMock()
mock_event = stack.enter_context(mock.patch.object(asyncio, "Event"))
with stack:
bot = yuyo.AsgiBot("token", "Bot", asgi_managed=False)
await bot.start()
mock_rest_client_impl.return_value.close.assert_not_called()
mock_event.return_value.set.assert_not_called()
await bot.close()
assert bot.is_alive is False
assert bot._join_event is None
mock_rest_client_impl.return_value.close.assert_awaited_once_with()
mock_event.return_value.set.assert_called_once_with()
@pytest.mark.asyncio()
async def test_close_when_not_alive(self):
bot = yuyo.AsgiBot("token", "Bot", asgi_managed=False)
with pytest.raises(RuntimeError, match="The client is not running"):
await bot.close()
@pytest.mark.asyncio()
async def test_join(self):
with mock.patch.object(hikari.impl, "RESTClientImpl"):
bot = yuyo.AsgiBot("token", "Bot", asgi_managed=False)
with mock.patch.object(asyncio, "Event", return_value=mock.AsyncMock()) as join_event:
await bot.start()
join_event.assert_called_once_with()
join_event.return_value.wait.assert_not_called()
await bot.join()
join_event.return_value.wait.assert_awaited_once_with()
@pytest.mark.asyncio()
async def test_join_when_not_alive(self):
bot = yuyo.AsgiBot("token", "Bot")
with pytest.raises(RuntimeError, match="The client is not running"):
await bot.join()
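The two classes exercised above compose as follows; a minimal usage sketch inferred only from the constructor calls in these tests (the uvicorn invocation is an assumption for illustration, not part of the suite):

import yuyo

# ASGI-managed (the default): _start/_close are registered as lifespan
# startup/shutdown callbacks, so calling bot.run() here raises RuntimeError.
bot = yuyo.AsgiBot("TOKEN", "Bot")

# The bot is itself an ASGI application, so it can be served by any ASGI
# server, e.g. (assumed command): uvicorn my_module:bot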
| 40.380368 | 120 | 0.618986 | 4,476 | 39,492 | 5.158624 | 0.080429 | 0.024946 | 0.031182 | 0.03456 | 0.847726 | 0.816197 | 0.789736 | 0.770637 | 0.739324 | 0.721351 | 0 | 0.007268 | 0.278791 | 39,492 | 977 | 121 | 40.421699 | 0.803314 | 0.040616 | 0 | 0.638677 | 0 | 0 | 0.109268 | 0.007952 | 0 | 0 | 0 | 0 | 0.161578 | 1 | 0.022901 | false | 0.002545 | 0.010178 | 0.003817 | 0.049618 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6
| 538ead2f3a3d2d4e0e9addda210b530d028ece26 | 79 | py | Python | test/src/Python/Functions/return-literal-integer.py | milliburn/llvmPy | d6fa3002e823fae00cf33d9b2ea480604681376c | ["MIT"] | 1 | 2019-01-22T02:58:04.000Z | 2019-01-22T02:58:04.000Z | test/src/Python/Functions/return-literal-integer.py | roberth-k/llvmPy | d6fa3002e823fae00cf33d9b2ea480604681376c | ["MIT"] | null | null | null | test/src/Python/Functions/return-literal-integer.py | roberth-k/llvmPy | d6fa3002e823fae00cf33d9b2ea480604681376c | ["MIT"] | null | null | null |
# RUN: %S/../test.sh %s
def func():
return 1
print(func()) # CHECK: 1
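The # RUN: and # CHECK: comments follow the LLVM lit/FileCheck convention: lit substitutes %s with this file's path and executes the RUN line, then FileCheck matches the program's output against the CHECK patterns. A second test in the same style might look like this (hypothetical, not from the repository):

# RUN: %S/../test.sh %s
def add(a, b):
    return a + b
print(add(2, 3))  # CHECK: 5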
| 8.777778 | 25 | 0.506329 | 13 | 79 | 3.076923 | 0.769231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034483 | 0.265823 | 79 | 8 | 26 | 9.875 | 0.655172 | 0.379747 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | true | 0 | 0 | 0.333333 | 0.666667 | 0.333333 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 6
| 0733388e2145a6c34fc4cda59a4a77944633f80a | 241 | py | Python | activities_photos/serializer.py | uestc-msc/uestcmsc_webapp_backend | fce859899346598f5a263b6fabb74deec816bc8c | ["MIT"] | 1 | 2021-01-04T01:56:26.000Z | 2021-01-04T01:56:26.000Z | activities_photos/serializer.py | uestc-msc/uestcmsc_webapp_backend | fce859899346598f5a263b6fabb74deec816bc8c | ["MIT"] | null | null | null | activities_photos/serializer.py | uestc-msc/uestcmsc_webapp_backend | fce859899346598f5a263b6fabb74deec816bc8c | ["MIT"] | null | null | null |
from activities_files.serializer import ActivityFileSerializer
from .models import ActivityPhoto
# The photo serializer also reuses ActivityFileSerializer
class ActivityPhotoSerializer(ActivityFileSerializer):
class Meta:
model = ActivityPhoto
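ActivityFileSerializer itself is not visible in this file; assuming it is a standard Django REST Framework ModelSerializer, the reuse pattern amounts to swapping the model on a fresh Meta, roughly:

# Hypothetical sketch of the base serializer being reused; the import path
# and fields below are assumptions, not taken from this repository.
from rest_framework import serializers
from activities_files.models import ActivityFile  # assumed module layout

class ActivityFileSerializer(serializers.ModelSerializer):
    class Meta:
        model = ActivityFile
        fields = '__all__'

Note that ActivityPhotoSerializer above declares a fresh Meta rather than subclassing ActivityFileSerializer.Meta, so Meta options other than model do not carry over from the parent.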
| 26.777778 | 63 | 0.80083 | 19 | 241 | 10.105263 | 0.684211 | 0.28125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.165975 | 241 | 8 | 64 | 30.125 | 0.955224 | 0.124481 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.4 | 0 | 0.8 | 0 | 1 | 0 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 6
| 073638edef453ca25440758bcee2af3c96582761 | 154 | py | Python | src/forum/admin.py | earth-emoji/infotechia | 44ed7aecf052001573b47320e6a1239968d2a067 | ["BSD-2-Clause"] | null | null | null | src/forum/admin.py | earth-emoji/infotechia | 44ed7aecf052001573b47320e6a1239968d2a067 | ["BSD-2-Clause"] | 11 | 2019-10-27T23:41:10.000Z | 2022-02-10T10:30:00.000Z | src/forum/admin.py | earth-emoji/infotechia | 44ed7aecf052001573b47320e6a1239968d2a067 | ["BSD-2-Clause"] | null | null | null |
from django.contrib import admin

from .models import Thread, Topic

admin.site.register(Thread)
admin.site.register(Topic)
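The same registrations can be written with Django's admin.register decorator (standard Django API), which keeps a ModelAdmin and its registration together:

# Alternative registration style, shown for comparison; not from this repo.
from django.contrib import admin
from .models import Thread

@admin.register(Thread)
class ThreadAdmin(admin.ModelAdmin):
    pass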
| 19.25 | 33 | 0.805195 | 23 | 154 | 5.391304 | 0.478261 | 0.16129 | 0.274194 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116883 | 154 | 7 | 34 | 22 | 0.911765 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 6
| 07555e4b6900835b0ef0152b4c9922e0ef9e5d1d | 4,881 | py | Python | ros/src/super_fast_object_detection/src/veloster_2sides_bev_utils.py | hyungikim4/SFA3D | 2e46d54276ce6ad9413fecd9f0aebbf6332554ed | ["MIT"] | 2 | 2021-01-07T14:30:25.000Z | 2021-05-24T11:12:15.000Z | ros/src/super_fast_object_detection/src/veloster_2sides_bev_utils.py | hyungikim4/SFA3D | 2e46d54276ce6ad9413fecd9f0aebbf6332554ed | ["MIT"] | 3 | 2021-06-02T02:30:37.000Z | 2021-08-21T11:34:14.000Z | ros/src/super_fast_object_detection/src/veloster_2sides_bev_utils.py | hyungikim4/SFA3D | 2e46d54276ce6ad9413fecd9f0aebbf6332554ed | ["MIT"] | 3 | 2021-06-10T04:55:49.000Z | 2022-02-06T12:05:13.000Z |
"""
# -*- coding: utf-8 -*-
-----------------------------------------------------------------------------------
"""
import math
import sys
import cv2
import numpy as np
sys.path.append('../')
import veloster_config_2sides as cnf
def makeBEVMap_binary(PointCloud_, boundary):
Height = cnf.BEV_HEIGHT + 1
Width = cnf.BEV_WIDTH + 1
# Discretize Feature Map
PointCloud = np.copy(PointCloud_)
    PointCloud[:, 0] = np.int_(np.floor(PointCloud[:, 0] / cnf.DISCRETIZATION) + Height / 2)
PointCloud[:, 1] = np.int_(np.floor(PointCloud[:, 1] / cnf.DISCRETIZATION) + Width / 2)
# sort-3times
indices = np.lexsort((-PointCloud[:, 2], PointCloud[:, 1], PointCloud[:, 0]))
PointCloud = PointCloud[indices]
# Height Map
heightMap = np.zeros((Height, Width))
_, indices = np.unique(PointCloud[:, 0:2], axis=0, return_index=True)
PointCloud_frac = PointCloud[indices]
    # note: image coordinates are indexed as (y, x), not (x, y)
max_height = float(np.abs(boundary['maxZ'] - boundary['minZ']))
heightMap[np.int_(PointCloud_frac[:, 0]), np.int_(PointCloud_frac[:, 1])] = PointCloud_frac[:, 2] / max_height
# Intensity Map & DensityMap
intensityMap = np.zeros((Height, Width))
densityMap = np.zeros((Height, Width))
_, indices, counts = np.unique(PointCloud[:, 0:2], axis=0, return_index=True, return_counts=True)
PointCloud_top = PointCloud[indices]
    normalizedCounts = np.minimum(1.0, np.log(counts + 1) / np.log(64))  # unused here: the binary variant fixes density to 1 below
intensityMap[np.int_(PointCloud_top[:, 0]), np.int_(PointCloud_top[:, 1])] = PointCloud_top[:, 3]
densityMap[np.int_(PointCloud_top[:, 0]), np.int_(PointCloud_top[:, 1])] = 1
RGB_Map = np.zeros((3, Height - 1, Width - 1))
RGB_Map[2, :, :] = densityMap[:cnf.BEV_HEIGHT, :cnf.BEV_WIDTH] # r_map
RGB_Map[1, :, :] = densityMap[:cnf.BEV_HEIGHT, :cnf.BEV_WIDTH] # g_map
RGB_Map[0, :, :] = densityMap[:cnf.BEV_HEIGHT, :cnf.BEV_WIDTH] # b_map
return RGB_Map
def makeBEVMap(PointCloud_, boundary):
Height = cnf.BEV_HEIGHT + 1
Width = cnf.BEV_WIDTH + 1
# Discretize Feature Map
PointCloud = np.copy(PointCloud_)
PointCloud[:, 0] = np.int_(np.floor(PointCloud[:, 0] / cnf.DISCRETIZATION) + Height / 2)
PointCloud[:, 1] = np.int_(np.floor(PointCloud[:, 1] / cnf.DISCRETIZATION) + Width / 2)
# sort-3times
indices = np.lexsort((-PointCloud[:, 2], PointCloud[:, 1], PointCloud[:, 0]))
PointCloud = PointCloud[indices]
# Height Map
heightMap = np.zeros((Height, Width))
_, indices = np.unique(PointCloud[:, 0:2], axis=0, return_index=True)
PointCloud_frac = PointCloud[indices]
    # note: image coordinates are indexed as (y, x), not (x, y)
max_height = float(np.abs(boundary['maxZ'] - boundary['minZ']))
heightMap[np.int_(PointCloud_frac[:, 0]), np.int_(PointCloud_frac[:, 1])] = PointCloud_frac[:, 2] / max_height
# Intensity Map & DensityMap
intensityMap = np.zeros((Height, Width))
densityMap = np.zeros((Height, Width))
_, indices, counts = np.unique(PointCloud[:, 0:2], axis=0, return_index=True, return_counts=True)
PointCloud_top = PointCloud[indices]
normalizedCounts = np.minimum(1.0, np.log(counts + 1) / np.log(64))
intensityMap[np.int_(PointCloud_top[:, 0]), np.int_(PointCloud_top[:, 1])] = PointCloud_top[:, 3]
densityMap[np.int_(PointCloud_top[:, 0]), np.int_(PointCloud_top[:, 1])] = normalizedCounts
RGB_Map = np.zeros((3, Height - 1, Width - 1))
RGB_Map[2, :, :] = densityMap[:cnf.BEV_HEIGHT, :cnf.BEV_WIDTH] # r_map
RGB_Map[1, :, :] = heightMap[:cnf.BEV_HEIGHT, :cnf.BEV_WIDTH] # g_map
RGB_Map[0, :, :] = intensityMap[:cnf.BEV_HEIGHT, :cnf.BEV_WIDTH] # b_map
return RGB_Map
# bev image coordinates format
def get_corners(x, y, w, l, yaw):
bev_corners = np.zeros((4, 2), dtype=np.float32)
cos_yaw = np.cos(yaw)
sin_yaw = np.sin(yaw)
# front left
bev_corners[0, 0] = x - w / 2 * cos_yaw - l / 2 * sin_yaw
bev_corners[0, 1] = y - w / 2 * sin_yaw + l / 2 * cos_yaw
# rear left
bev_corners[1, 0] = x - w / 2 * cos_yaw + l / 2 * sin_yaw
bev_corners[1, 1] = y - w / 2 * sin_yaw - l / 2 * cos_yaw
# rear right
bev_corners[2, 0] = x + w / 2 * cos_yaw + l / 2 * sin_yaw
bev_corners[2, 1] = y + w / 2 * sin_yaw - l / 2 * cos_yaw
# front right
bev_corners[3, 0] = x + w / 2 * cos_yaw - l / 2 * sin_yaw
bev_corners[3, 1] = y + w / 2 * sin_yaw + l / 2 * cos_yaw
return bev_corners
def drawRotatedBox(img, x, y, w, l, yaw, color):
bev_corners = get_corners(x, y, w, l, yaw)
corners_int = bev_corners.reshape(-1, 1, 2).astype(int)
cv2.polylines(img, [corners_int], True, color, 2)
corners_int = bev_corners.reshape(-1, 2)
cv2.line(img, (corners_int[0, 0], corners_int[0, 1]), (corners_int[3, 0], corners_int[3, 1]), (255, 255, 0), 2)
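A usage sketch, not part of this module: build a BEV image from an (N, 4) point cloud of x, y, z, intensity columns and draw one rotated box on it. cnf.boundary is assumed to be the config's boundary dict with the 'minZ'/'maxZ' keys that makeBEVMap reads.

# Hypothetical usage of the helpers above; the point data and box pose are
# made up for illustration.
points = np.random.rand(1000, 4).astype(np.float32)  # x, y, z, intensity
rgb_map = makeBEVMap(points, cnf.boundary)  # (3, BEV_HEIGHT, BEV_WIDTH), channels first
bev_image = (rgb_map.transpose(1, 2, 0) * 255).astype(np.uint8)  # HxWx3 for OpenCV
drawRotatedBox(bev_image, x=200, y=300, w=20, l=45, yaw=0.3, color=(0, 255, 0))
cv2.imwrite('bev_debug.png', bev_image)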
| 37.837209 | 115 | 0.62774 | 707 | 4,881 | 4.151344 | 0.138614 | 0.032709 | 0.061329 | 0.049063 | 0.831005 | 0.82862 | 0.80954 | 0.791141 | 0.791141 | 0.791141 | 0 | 0.035016 | 0.192583 | 4,881 | 128 | 116 | 38.132813 | 0.709718 | 0.100184 | 0 | 0.56 | 0 | 0 | 0.004354 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.053333 | false | 0 | 0.066667 | 0 | 0.16 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6
| 075bb885ac92b2db1804d37f83387e3fec363b21 | 67,178 | py | Python | tests/test_0046-histograms-bh-hist.py | eic/uproot4 | deb8d88c2643521f372bf5005c51af8926016c7e | ["BSD-3-Clause"] | 133 | 2020-05-08T21:34:11.000Z | 2022-03-07T18:12:58.000Z | tests/test_0046-histograms-bh-hist.py | eic/uproot4 | deb8d88c2643521f372bf5005c51af8926016c7e | ["BSD-3-Clause"] | 269 | 2020-05-13T02:42:24.000Z | 2022-03-24T20:24:16.000Z | tests/test_0046-histograms-bh-hist.py | eic/uproot4 | deb8d88c2643521f372bf5005c51af8926016c7e | ["BSD-3-Clause"] | 45 | 2020-05-15T17:48:04.000Z | 2022-03-18T19:23:07.000Z |
# BSD 3-Clause License; see https://github.com/scikit-hep/uproot4/blob/main/LICENSE
from __future__ import absolute_import
import numpy
import pytest
import skhep_testdata
import uproot
def test_numpy_1d():
with uproot.open(skhep_testdata.data_path("uproot-hepdata-example.root")) as f:
values, edges = f["hpx"].to_numpy(flow=True)
assert values.tolist() == [
2.0,
2.0,
3.0,
1.0,
1.0,
2.0,
4.0,
6.0,
12.0,
8.0,
9.0,
15.0,
15.0,
31.0,
35.0,
40.0,
64.0,
64.0,
81.0,
108.0,
124.0,
156.0,
165.0,
209.0,
262.0,
297.0,
392.0,
432.0,
466.0,
521.0,
604.0,
657.0,
788.0,
903.0,
1079.0,
1135.0,
1160.0,
1383.0,
1458.0,
1612.0,
1770.0,
1868.0,
1861.0,
1946.0,
2114.0,
2175.0,
2207.0,
2273.0,
2276.0,
2329.0,
2325.0,
2381.0,
2417.0,
2364.0,
2284.0,
2188.0,
2164.0,
2130.0,
1940.0,
1859.0,
1763.0,
1700.0,
1611.0,
1459.0,
1390.0,
1237.0,
1083.0,
1046.0,
888.0,
752.0,
742.0,
673.0,
555.0,
533.0,
366.0,
378.0,
272.0,
256.0,
200.0,
174.0,
132.0,
118.0,
100.0,
89.0,
86.0,
39.0,
37.0,
25.0,
23.0,
20.0,
16.0,
14.0,
9.0,
13.0,
8.0,
2.0,
2.0,
6.0,
1.0,
0.0,
1.0,
4.0,
]
assert edges.tolist() == [
-numpy.inf,
-4.0,
-3.92,
-3.84,
-3.76,
-3.68,
-3.6,
-3.52,
-3.44,
-3.36,
-3.2800000000000002,
-3.2,
-3.12,
-3.04,
-2.96,
-2.88,
-2.8,
-2.7199999999999998,
-2.6399999999999997,
-2.56,
-2.48,
-2.4,
-2.3200000000000003,
-2.24,
-2.16,
-2.08,
-2.0,
-1.92,
-1.8399999999999999,
-1.7599999999999998,
-1.6800000000000002,
-1.6,
-1.52,
-1.44,
-1.3599999999999999,
-1.2799999999999998,
-1.1999999999999997,
-1.12,
-1.04,
-0.96,
-0.8799999999999999,
-0.7999999999999998,
-0.7199999999999998,
-0.6400000000000001,
-0.56,
-0.48,
-0.3999999999999999,
-0.31999999999999984,
-0.23999999999999977,
-0.16000000000000014,
-0.08000000000000007,
0.0,
0.08000000000000007,
0.16000000000000014,
0.2400000000000002,
0.3200000000000003,
0.40000000000000036,
0.4800000000000004,
0.5600000000000005,
0.6399999999999997,
0.7199999999999998,
0.7999999999999998,
0.8799999999999999,
0.96,
1.04,
1.12,
1.2000000000000002,
1.2800000000000002,
1.3600000000000003,
1.4400000000000004,
1.5200000000000005,
1.6000000000000005,
1.6799999999999997,
1.7599999999999998,
1.8399999999999999,
1.92,
2.0,
2.08,
2.16,
2.24,
2.3200000000000003,
2.4000000000000004,
2.4800000000000004,
2.5600000000000005,
2.6400000000000006,
2.7199999999999998,
2.8,
2.88,
2.96,
3.04,
3.12,
3.2,
3.2800000000000002,
3.3600000000000003,
3.4400000000000004,
3.5200000000000005,
3.6000000000000005,
3.6799999999999997,
3.76,
3.84,
3.92,
4.0,
numpy.inf,
]
f["hpx"].errors()
def test_numpy_2d():
with uproot.open(skhep_testdata.data_path("uproot-hepdata-example.root")) as f:
values, xedges, yedges = f["hpxpy"].to_numpy(flow=True)
assert values.tolist() == [
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
2.0,
4.0,
1.0,
0.0,
2.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
1.0,
1.0,
2.0,
0.0,
2.0,
2.0,
0.0,
1.0,
1.0,
2.0,
2.0,
0.0,
1.0,
5.0,
2.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
2.0,
0.0,
2.0,
1.0,
3.0,
4.0,
3.0,
4.0,
4.0,
3.0,
3.0,
6.0,
1.0,
0.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
4.0,
1.0,
4.0,
5.0,
2.0,
7.0,
7.0,
9.0,
13.0,
10.0,
4.0,
3.0,
3.0,
4.0,
6.0,
3.0,
1.0,
1.0,
0.0,
3.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
3.0,
3.0,
2.0,
9.0,
4.0,
8.0,
7.0,
8.0,
10.0,
17.0,
10.0,
13.0,
17.0,
17.0,
9.0,
12.0,
1.0,
6.0,
7.0,
2.0,
1.0,
1.0,
2.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
2.0,
1.0,
0.0,
2.0,
2.0,
7.0,
7.0,
11.0,
12.0,
13.0,
16.0,
25.0,
16.0,
18.0,
21.0,
22.0,
20.0,
19.0,
9.0,
9.0,
16.0,
7.0,
3.0,
4.0,
6.0,
2.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
5.0,
4.0,
7.0,
5.0,
12.0,
5.0,
16.0,
23.0,
28.0,
28.0,
25.0,
37.0,
41.0,
41.0,
27.0,
24.0,
21.0,
19.0,
16.0,
15.0,
11.0,
4.0,
4.0,
2.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
4.0,
1.0,
6.0,
6.0,
14.0,
14.0,
21.0,
26.0,
46.0,
42.0,
47.0,
52.0,
44.0,
51.0,
53.0,
41.0,
56.0,
30.0,
24.0,
19.0,
20.0,
21.0,
12.0,
8.0,
1.0,
2.0,
3.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
3.0,
2.0,
3.0,
3.0,
4.0,
6.0,
11.0,
8.0,
20.0,
36.0,
47.0,
40.0,
49.0,
61.0,
61.0,
70.0,
87.0,
95.0,
90.0,
74.0,
62.0,
66.0,
50.0,
42.0,
24.0,
14.0,
16.0,
7.0,
7.0,
2.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0,
4.0,
5.0,
9.0,
10.0,
21.0,
28.0,
31.0,
39.0,
48.0,
88.0,
87.0,
80.0,
102.0,
92.0,
108.0,
100.0,
97.0,
100.0,
71.0,
76.0,
35.0,
32.0,
26.0,
31.0,
12.0,
9.0,
4.0,
4.0,
2.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
2.0,
6.0,
5.0,
11.0,
9.0,
18.0,
23.0,
32.0,
54.0,
69.0,
81.0,
106.0,
105.0,
126.0,
132.0,
140.0,
148.0,
137.0,
130.0,
121.0,
104.0,
88.0,
68.0,
53.0,
35.0,
30.0,
16.0,
9.0,
6.0,
3.0,
8.0,
2.0,
0.0,
1.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
1.0,
0.0,
1.0,
4.0,
1.0,
5.0,
7.0,
22.0,
20.0,
44.0,
57.0,
60.0,
100.0,
149.0,
148.0,
155.0,
201.0,
198.0,
198.0,
216.0,
207.0,
182.0,
159.0,
153.0,
102.0,
104.0,
66.0,
44.0,
28.0,
21.0,
8.0,
11.0,
4.0,
4.0,
1.0,
1.0,
0.0,
1.0,
0.0,
1.0,
],
[
0.0,
0.0,
0.0,
0.0,
2.0,
2.0,
3.0,
6.0,
8.0,
16.0,
34.0,
53.0,
58.0,
88.0,
106.0,
131.0,
179.0,
215.0,
206.0,
274.0,
236.0,
261.0,
243.0,
240.0,
207.0,
162.0,
138.0,
115.0,
85.0,
65.0,
59.0,
27.0,
22.0,
13.0,
7.0,
5.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
1.0,
2.0,
0.0,
2.0,
1.0,
5.0,
6.0,
9.0,
13.0,
20.0,
39.0,
60.0,
74.0,
94.0,
145.0,
171.0,
211.0,
253.0,
281.0,
321.0,
311.0,
354.0,
317.0,
289.0,
269.0,
221.0,
199.0,
139.0,
97.0,
73.0,
50.0,
31.0,
29.0,
9.0,
11.0,
4.0,
3.0,
2.0,
0.0,
0.0,
1.0,
0.0,
],
[
0.0,
0.0,
0.0,
1.0,
0.0,
2.0,
3.0,
17.0,
17.0,
29.0,
42.0,
73.0,
93.0,
104.0,
169.0,
222.0,
232.0,
250.0,
361.0,
346.0,
375.0,
363.0,
349.0,
333.0,
312.0,
247.0,
195.0,
176.0,
109.0,
92.0,
51.0,
43.0,
26.0,
17.0,
7.0,
6.0,
2.0,
2.0,
2.0,
0.0,
1.0,
0.0,
],
[
0.0,
0.0,
0.0,
2.0,
1.0,
2.0,
6.0,
8.0,
16.0,
33.0,
51.0,
95.0,
93.0,
134.0,
164.0,
231.0,
298.0,
353.0,
341.0,
420.0,
432.0,
425.0,
404.0,
360.0,
326.0,
301.0,
211.0,
175.0,
139.0,
93.0,
62.0,
56.0,
26.0,
11.0,
11.0,
11.0,
1.0,
0.0,
2.0,
1.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
2.0,
1.0,
1.0,
9.0,
13.0,
28.0,
21.0,
47.0,
82.0,
106.0,
150.0,
199.0,
241.0,
284.0,
334.0,
403.0,
479.0,
445.0,
438.0,
408.0,
386.0,
316.0,
300.0,
218.0,
231.0,
135.0,
111.0,
77.0,
68.0,
27.0,
27.0,
12.0,
3.0,
6.0,
0.0,
1.0,
0.0,
0.0,
1.0,
],
[
0.0,
0.0,
0.0,
0.0,
1.0,
5.0,
6.0,
13.0,
16.0,
35.0,
68.0,
68.0,
95.0,
142.0,
190.0,
260.0,
287.0,
363.0,
403.0,
448.0,
478.0,
446.0,
439.0,
401.0,
396.0,
314.0,
245.0,
226.0,
134.0,
114.0,
66.0,
44.0,
29.0,
23.0,
14.0,
8.0,
12.0,
6.0,
3.0,
0.0,
2.0,
0.0,
],
[
0.0,
0.0,
0.0,
1.0,
2.0,
3.0,
9.0,
14.0,
22.0,
34.0,
60.0,
86.0,
129.0,
179.0,
210.0,
270.0,
275.0,
370.0,
416.0,
445.0,
497.0,
449.0,
440.0,
426.0,
385.0,
278.0,
273.0,
210.0,
141.0,
115.0,
77.0,
50.0,
32.0,
25.0,
15.0,
8.0,
5.0,
3.0,
3.0,
0.0,
0.0,
0.0,
],
[
1.0,
0.0,
0.0,
0.0,
1.0,
4.0,
5.0,
11.0,
24.0,
19.0,
41.0,
88.0,
126.0,
120.0,
197.0,
260.0,
281.0,
344.0,
398.0,
411.0,
476.0,
436.0,
488.0,
393.0,
331.0,
302.0,
236.0,
205.0,
171.0,
115.0,
61.0,
65.0,
23.0,
19.0,
11.0,
4.0,
5.0,
2.0,
0.0,
3.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
2.0,
2.0,
4.0,
2.0,
13.0,
22.0,
32.0,
47.0,
72.0,
103.0,
135.0,
209.0,
200.0,
284.0,
341.0,
360.0,
391.0,
412.0,
424.0,
443.0,
370.0,
323.0,
262.0,
221.0,
180.0,
159.0,
91.0,
75.0,
38.0,
28.0,
24.0,
10.0,
6.0,
1.0,
2.0,
0.0,
1.0,
0.0,
0.0,
],
[
1.0,
0.0,
0.0,
0.0,
3.0,
1.0,
4.0,
6.0,
18.0,
30.0,
37.0,
66.0,
98.0,
119.0,
141.0,
203.0,
233.0,
303.0,
345.0,
348.0,
360.0,
367.0,
350.0,
302.0,
280.0,
251.0,
203.0,
155.0,
121.0,
64.0,
49.0,
43.0,
28.0,
21.0,
8.0,
4.0,
2.0,
1.0,
1.0,
1.0,
0.0,
0.0,
],
[
0.0,
1.0,
0.0,
0.0,
0.0,
4.0,
4.0,
10.0,
17.0,
28.0,
43.0,
52.0,
75.0,
108.0,
162.0,
155.0,
211.0,
268.0,
278.0,
339.0,
331.0,
339.0,
305.0,
239.0,
241.0,
223.0,
161.0,
136.0,
93.0,
86.0,
63.0,
32.0,
25.0,
15.0,
10.0,
0.0,
2.0,
1.0,
0.0,
0.0,
0.0,
1.0,
],
[
2.0,
0.0,
0.0,
0.0,
1.0,
1.0,
1.0,
5.0,
10.0,
17.0,
27.0,
40.0,
86.0,
91.0,
123.0,
150.0,
172.0,
197.0,
247.0,
237.0,
255.0,
279.0,
271.0,
218.0,
189.0,
194.0,
152.0,
108.0,
92.0,
52.0,
41.0,
32.0,
16.0,
22.0,
5.0,
1.0,
4.0,
1.0,
0.0,
0.0,
0.0,
0.0,
],
[
1.0,
1.0,
0.0,
0.0,
1.0,
2.0,
6.0,
4.0,
6.0,
14.0,
22.0,
28.0,
57.0,
56.0,
87.0,
111.0,
142.0,
169.0,
206.0,
202.0,
211.0,
209.0,
181.0,
174.0,
158.0,
157.0,
105.0,
89.0,
62.0,
44.0,
34.0,
20.0,
15.0,
12.0,
9.0,
7.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
1.0,
1.0,
1.0,
1.0,
5.0,
4.0,
8.0,
15.0,
27.0,
33.0,
38.0,
64.0,
67.0,
84.0,
119.0,
131.0,
153.0,
165.0,
151.0,
151.0,
129.0,
126.0,
125.0,
92.0,
70.0,
46.0,
33.0,
23.0,
22.0,
10.0,
7.0,
2.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
1.0,
2.0,
2.0,
7.0,
8.0,
11.0,
16.0,
15.0,
35.0,
43.0,
39.0,
61.0,
86.0,
99.0,
83.0,
131.0,
131.0,
107.0,
101.0,
112.0,
86.0,
76.0,
69.0,
57.0,
39.0,
32.0,
17.0,
11.0,
8.0,
1.0,
3.0,
3.0,
0.0,
1.0,
0.0,
1.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
2.0,
6.0,
4.0,
11.0,
17.0,
22.0,
20.0,
34.0,
27.0,
46.0,
80.0,
69.0,
71.0,
76.0,
79.0,
66.0,
82.0,
67.0,
58.0,
49.0,
32.0,
21.0,
22.0,
21.0,
9.0,
5.0,
4.0,
5.0,
2.0,
3.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
1.0,
2.0,
4.0,
8.0,
19.0,
15.0,
16.0,
26.0,
26.0,
49.0,
54.0,
51.0,
45.0,
46.0,
55.0,
39.0,
33.0,
40.0,
24.0,
22.0,
20.0,
15.0,
8.0,
11.0,
4.0,
2.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
2.0,
1.0,
6.0,
8.0,
12.0,
15.0,
28.0,
24.0,
25.0,
30.0,
39.0,
34.0,
28.0,
27.0,
27.0,
22.0,
18.0,
10.0,
11.0,
6.0,
4.0,
9.0,
1.0,
2.0,
2.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0,
2.0,
0.0,
4.0,
5.0,
5.0,
9.0,
12.0,
13.0,
22.0,
22.0,
19.0,
23.0,
21.0,
20.0,
20.0,
10.0,
20.0,
11.0,
8.0,
5.0,
5.0,
4.0,
0.0,
2.0,
2.0,
2.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
2.0,
1.0,
1.0,
0.0,
1.0,
3.0,
2.0,
3.0,
1.0,
4.0,
4.0,
10.0,
11.0,
13.0,
16.0,
12.0,
9.0,
18.0,
19.0,
6.0,
8.0,
5.0,
5.0,
1.0,
4.0,
0.0,
2.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
1.0,
3.0,
5.0,
3.0,
1.0,
5.0,
11.0,
2.0,
5.0,
3.0,
8.0,
4.0,
3.0,
6.0,
4.0,
1.0,
2.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
4.0,
0.0,
3.0,
2.0,
3.0,
4.0,
4.0,
8.0,
3.0,
6.0,
2.0,
2.0,
4.0,
1.0,
1.0,
2.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
1.0,
1.0,
1.0,
2.0,
4.0,
2.0,
1.0,
2.0,
4.0,
1.0,
1.0,
1.0,
1.0,
2.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
2.0,
3.0,
1.0,
0.0,
2.0,
3.0,
0.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
2.0,
3.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
2.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
],
]
assert xedges.tolist() == [
-numpy.inf,
-4.0,
-3.8,
-3.6,
-3.4,
-3.2,
-3.0,
-2.8,
-2.5999999999999996,
-2.4,
-2.2,
-2.0,
-1.7999999999999998,
-1.5999999999999996,
-1.4,
-1.1999999999999997,
-1.0,
-0.7999999999999998,
-0.5999999999999996,
-0.3999999999999999,
-0.19999999999999973,
0.0,
0.20000000000000018,
0.40000000000000036,
0.6000000000000005,
0.8000000000000007,
1.0,
1.2000000000000002,
1.4000000000000004,
1.6000000000000005,
1.8000000000000007,
2.0,
2.2,
2.4000000000000004,
2.6000000000000005,
2.8000000000000007,
3.0,
3.2,
3.4000000000000004,
3.6000000000000005,
3.8000000000000007,
4.0,
numpy.inf,
]
assert yedges.tolist() == [
-numpy.inf,
-4.0,
-3.8,
-3.6,
-3.4,
-3.2,
-3.0,
-2.8,
-2.5999999999999996,
-2.4,
-2.2,
-2.0,
-1.7999999999999998,
-1.5999999999999996,
-1.4,
-1.1999999999999997,
-1.0,
-0.7999999999999998,
-0.5999999999999996,
-0.3999999999999999,
-0.19999999999999973,
0.0,
0.20000000000000018,
0.40000000000000036,
0.6000000000000005,
0.8000000000000007,
1.0,
1.2000000000000002,
1.4000000000000004,
1.6000000000000005,
1.8000000000000007,
2.0,
2.2,
2.4000000000000004,
2.6000000000000005,
2.8000000000000007,
3.0,
3.2,
3.4000000000000004,
3.6000000000000005,
3.8000000000000007,
4.0,
numpy.inf,
]
f["hpxpy"].errors()
def test_numpy_profile():
# python -c 'import ROOT, skhep_testdata; f = ROOT.TFile(skhep_testdata.data_path("uproot-hepdata-example.root")); h = f.Get("hprof"); h.SetErrorOption("g"); print(repr(h.GetErrorOption())); print([h.GetBinError(i) for i in range(102)])'
with uproot.open(skhep_testdata.data_path("uproot-hepdata-example.root")) as f:
obj = f["hprof"]
assert obj.axis().edges(flow=True).tolist() == [
-numpy.inf,
-4.0,
-3.92,
-3.84,
-3.76,
-3.68,
-3.6,
-3.52,
-3.44,
-3.36,
-3.2800000000000002,
-3.2,
-3.12,
-3.04,
-2.96,
-2.88,
-2.8,
-2.7199999999999998,
-2.6399999999999997,
-2.56,
-2.48,
-2.4,
-2.3200000000000003,
-2.24,
-2.16,
-2.08,
-2.0,
-1.92,
-1.8399999999999999,
-1.7599999999999998,
-1.6800000000000002,
-1.6,
-1.52,
-1.44,
-1.3599999999999999,
-1.2799999999999998,
-1.1999999999999997,
-1.12,
-1.04,
-0.96,
-0.8799999999999999,
-0.7999999999999998,
-0.7199999999999998,
-0.6400000000000001,
-0.56,
-0.48,
-0.3999999999999999,
-0.31999999999999984,
-0.23999999999999977,
-0.16000000000000014,
-0.08000000000000007,
0.0,
0.08000000000000007,
0.16000000000000014,
0.2400000000000002,
0.3200000000000003,
0.40000000000000036,
0.4800000000000004,
0.5600000000000005,
0.6399999999999997,
0.7199999999999998,
0.7999999999999998,
0.8799999999999999,
0.96,
1.04,
1.12,
1.2000000000000002,
1.2800000000000002,
1.3600000000000003,
1.4400000000000004,
1.5200000000000005,
1.6000000000000005,
1.6799999999999997,
1.7599999999999998,
1.8399999999999999,
1.92,
2.0,
2.08,
2.16,
2.24,
2.3200000000000003,
2.4000000000000004,
2.4800000000000004,
2.5600000000000005,
2.6400000000000006,
2.7199999999999998,
2.8,
2.88,
2.96,
3.04,
3.12,
3.2,
3.2800000000000002,
3.3600000000000003,
3.4400000000000004,
3.5200000000000005,
3.6000000000000005,
3.6799999999999997,
3.76,
3.84,
3.92,
4.0,
numpy.inf,
]
assert obj.values(flow=True).tolist() == [
17.99833583831787,
17.05295467376709,
16.96826426188151,
15.189482688903809,
13.73788833618164,
13.375219821929932,
13.510369300842285,
12.646300633748373,
12.66011929512024,
11.824836373329163,
11.623446782430014,
11.472076733907064,
10.052986780802408,
10.030597317603327,
9.614417321341378,
8.776622557640076,
8.620806604623795,
8.179968640208244,
7.4127079410317505,
7.497226472254153,
6.980819525257234,
6.505285000189756,
6.251851732080633,
5.813575813074431,
5.584403858840011,
5.011047506171846,
4.91228925087014,
4.524659741255972,
4.24002511460382,
4.077462992146468,
3.638793389923525,
3.5221418274773493,
3.255871357954093,
2.961020285108953,
2.706199676046999,
2.5841911697177635,
2.3627997641933374,
2.1493446517490598,
2.0077903614940302,
1.8382392522714865,
1.712551970266353,
1.6131308919867815,
1.449079261311019,
1.3471352570103472,
1.245844892917823,
1.1707659457058741,
1.1247396327430272,
1.1198479739799145,
1.0281285326813325,
1.0417602170529079,
1.0197545518784679,
1.0003131686022901,
1.0794705348466953,
1.02964734215157,
1.0603044479791786,
1.1542847645715888,
1.1745855332784314,
1.317462644113901,
1.2909844154549628,
1.4553258675057892,
1.5839730073833629,
1.7274112791524214,
1.8171250952244693,
1.999616364569922,
2.1976474514968105,
2.332895248766955,
2.573682461088714,
2.7457328102556744,
2.9121971759978718,
3.157701852473807,
3.3310595230272195,
3.685565097902363,
4.011118740219254,
4.3144918141177175,
4.548257073418039,
4.93563452094951,
5.191882547210245,
5.4767660945653915,
5.7347985672950745,
6.18110868574559,
6.4068912520553125,
7.048662836268797,
7.238576850891113,
7.555341683077009,
8.169158785842185,
9.019065893613375,
8.789572896184149,
9.365243797302247,
9.570246945256772,
10.279665088653564,
11.086111783981323,
11.118131773812431,
12.656685405307346,
12.176475048065186,
12.393176078796387,
16.518978118896484,
13.303139686584473,
14.635026613871256,
14.96741771697998,
0.0,
18.32199478149414,
17.8403746287028,
]
assert obj.errors(flow=True).tolist() == [
0.2425426377130359,
0.7421210342302459,
0.4940066334987832,
0.0,
0.0,
0.2464980351520863,
0.5555373736396868,
0.24357921956140027,
0.224616129931814,
0.34906168361481404,
0.4356334723283742,
0.5128651082538828,
0.2086307384620165,
0.28308077003120913,
0.2891541406820913,
0.16769727425722117,
0.1725773236590863,
0.12765099099147656,
0.10176558165942572,
0.15209837443095275,
0.11509671433352467,
0.10149120489291587,
0.11432069747168126,
0.09759737443630617,
0.0925726825400381,
0.06761852807106097,
0.07883833461255244,
0.06391971743421765,
0.07016808339801081,
0.0679063456384074,
0.05330254783019173,
0.056304893803072076,
0.055238305812566516,
0.047974962128087315,
0.042558147198316985,
0.04422411577185198,
0.0408986879854767,
0.03453675368752007,
0.039438577439864786,
0.03461426584130604,
0.036187944978430614,
0.034085467706933194,
0.03170797279308202,
0.031219377450826796,
0.03011256422687173,
0.02926608780683337,
0.0301281364334744,
0.029773650810830235,
0.029748389712173053,
0.03081957669527989,
0.03132949553456636,
0.02939420318612115,
0.029258470846132534,
0.02930430026995912,
0.02804401796249436,
0.031175984988258274,
0.030108329759273612,
0.03149116682767534,
0.029094905772258012,
0.03256760040302268,
0.034455467521643364,
0.03480207320474039,
0.032712202513451534,
0.03860859020725239,
0.03885261043325975,
0.03856340740992072,
0.04624045482680718,
0.04543317885660241,
0.04864621055120345,
0.05203738725490573,
0.043244016740287015,
0.05850656051444226,
0.059709748394490884,
0.06594229969906718,
0.07220151434675717,
0.08170131663135467,
0.08712811029061408,
0.08092332833341198,
0.09191356506835095,
0.10837656197125221,
0.10509032780349721,
0.1549338147492931,
0.12013956272890565,
0.11435861802671626,
0.18394299511064918,
0.36368702093446753,
0.13346262669376094,
0.18325723104438668,
0.17988975869975438,
0.1926530171606879,
0.352473088726965,
0.18420322865597596,
0.5959353241264886,
0.21540243485684468,
0.11755951260322403,
1.6619844323502102,
0.1352812684763272,
0.4534391377411209,
0.0,
0.0,
0.0,
0.16817919583370047,
]
assert obj.errors(
flow=True, error_mode=uproot.behaviors.TProfile._kERRORSPREAD
).tolist() == [
0.34300708770751953,
1.0495176315307617,
0.8556445884959498,
0.0,
0.0,
0.3486008644104004,
1.1110747472793736,
0.5966447998707816,
0.7780930984827886,
0.9872955341457128,
1.3069004169851226,
1.9863180231181519,
0.8080233755703451,
1.5761270231822468,
1.7106589658888625,
1.0606106881094808,
1.3806185892726903,
1.0212079279318125,
0.9158902349348315,
1.5806526735782713,
1.281662768690052,
1.2676247428226026,
1.4684759475789604,
1.4109488746385728,
1.4984197698897908,
1.1653166117127,
1.560919388615718,
1.3285463784181335,
1.5147207420285738,
1.549991160077581,
1.3099853470686935,
1.443207670599461,
1.5506131361772943,
1.4416456163169384,
1.3979557820249364,
1.4898998932597651,
1.39295911912831,
1.284377246895075,
1.5059134195962758,
1.3897530746031688,
1.5224763480325734,
1.473186374916331,
1.367860043067912,
1.377195694990315,
1.3845231787179089,
1.3648794718765778,
1.4153812430343926,
1.419488271301224,
1.419219569870578,
1.4873439583962957,
1.5106535672672314,
1.4343045945107848,
1.4384340328933711,
1.4248038889030987,
1.340257624082002,
1.4582898146438432,
1.4006037738107093,
1.453377907771706,
1.2814976672937608,
1.4041886411676958,
1.446719393622703,
1.4349262381362273,
1.3129783240312063,
1.4747268574003336,
1.4485303652651937,
1.3563140181188076,
1.5217255253773476,
1.4693963839287074,
1.449624425594751,
1.4270014133077806,
1.1779530457556422,
1.517791441678946,
1.406668404280142,
1.522396207351309,
1.3812963022723197,
1.5884551434189818,
1.4369536067546675,
1.2947732533345917,
1.2998541028572388,
1.429585037043725,
1.2073959432248138,
1.6830120202858494,
1.2013956272890565,
1.0788570447521093,
1.705817161574992,
2.271224717779226,
0.811821464847988,
0.9162861552219334,
0.8627209754934005,
0.8615704848834633,
1.40989235490786,
0.6892253711682418,
1.787805972379466,
0.7461759224922005,
0.3325085142189005,
2.350400924682617,
0.1913166046142578,
1.1106945168733242,
0.0,
0.0,
0.0,
0.29129491196004526,
]
assert obj.errors(
flow=True, error_mode=uproot.behaviors.TProfile._kERRORSPREADI
).tolist() == [
0.2425426377130359,
0.7421210342302459,
0.4940066334987832,
0.2886751345948129,
0.2886751345948129,
0.2464980351520863,
0.5555373736396868,
0.24357921956140027,
0.224616129931814,
0.34906168361481404,
0.4356334723283742,
0.5128651082538828,
0.2086307384620165,
0.28308077003120913,
0.2891541406820913,
0.16769727425722117,
0.1725773236590863,
0.12765099099147656,
0.10176558165942572,
0.15209837443095275,
0.11509671433352467,
0.10149120489291587,
0.11432069747168126,
0.09759737443630617,
0.0925726825400381,
0.06761852807106097,
0.07883833461255244,
0.06391971743421765,
0.07016808339801081,
0.0679063456384074,
0.05330254783019173,
0.056304893803072076,
0.055238305812566516,
0.047974962128087315,
0.042558147198316985,
0.04422411577185198,
0.0408986879854767,
0.03453675368752007,
0.039438577439864786,
0.03461426584130604,
0.036187944978430614,
0.034085467706933194,
0.03170797279308202,
0.031219377450826796,
0.03011256422687173,
0.02926608780683337,
0.0301281364334744,
0.029773650810830235,
0.029748389712173053,
0.03081957669527989,
0.03132949553456636,
0.02939420318612115,
0.029258470846132534,
0.02930430026995912,
0.02804401796249436,
0.031175984988258274,
0.030108329759273612,
0.03149116682767534,
0.029094905772258012,
0.03256760040302268,
0.034455467521643364,
0.03480207320474039,
0.032712202513451534,
0.03860859020725239,
0.03885261043325975,
0.03856340740992072,
0.04624045482680718,
0.04543317885660241,
0.04864621055120345,
0.05203738725490573,
0.043244016740287015,
0.05850656051444226,
0.059709748394490884,
0.06594229969906718,
0.07220151434675717,
0.08170131663135467,
0.08712811029061408,
0.08092332833341198,
0.09191356506835095,
0.10837656197125221,
0.10509032780349721,
0.1549338147492931,
0.12013956272890565,
0.11435861802671626,
0.18394299511064918,
0.36368702093446753,
0.13346262669376094,
0.18325723104438668,
0.17988975869975438,
0.1926530171606879,
0.352473088726965,
0.18420322865597596,
0.5959353241264886,
0.21540243485684468,
0.11755951260322403,
1.6619844323502102,
0.1352812684763272,
0.4534391377411209,
0.2886751345948129,
0.0,
0.2886751345948129,
0.16817919583370047,
]
assert obj.errors(
flow=True, error_mode=uproot.behaviors.TProfile._kERRORSPREADG
).tolist() == [
0.7071067811865475,
0.7071067811865475,
0.5773502691896258,
1.0,
1.0,
0.7071067811865475,
0.5,
0.4082482904638631,
0.2886751345948129,
0.35355339059327373,
0.3333333333333333,
0.2581988897471611,
0.2581988897471611,
0.1796053020267749,
0.1690308509457033,
0.15811388300841897,
0.125,
0.125,
0.1111111111111111,
0.09622504486493763,
0.08980265101338746,
0.08006407690254357,
0.0778498944161523,
0.06917144638660747,
0.06178020632152154,
0.058025885318565944,
0.050507627227610534,
0.048112522432468816,
0.04632410546120795,
0.04381079543383235,
0.04068942293855797,
0.03901371573204352,
0.035623524993954825,
0.033277916281986085,
0.03044312827739915,
0.02968260885977624,
0.029361010975735173,
0.026889882837002246,
0.026189140043946204,
0.024906774069335894,
0.023769134427076417,
0.023137240669137377,
0.023180714250535184,
0.022668802672263903,
0.021749411414517784,
0.021442250696755896,
0.021286234067143354,
0.020974918506045256,
0.020961090407515925,
0.020721216851891204,
0.020739033894608506,
0.02049369659597791,
0.020340502363726694,
0.02056725174474318,
0.02092434876593436,
0.02137845624045064,
0.02149667901961739,
0.021667569500871973,
0.022703830459324992,
0.023193180352135665,
0.023816275411477048,
0.024253562503633298,
0.024914503091731197,
0.026180163474687157,
0.026822089039291005,
0.028432506701809173,
0.0303868562731382,
0.030919620705155318,
0.033557802760701215,
0.03646624787447364,
0.036711154910717615,
0.03854716722458499,
0.04244763599780089,
0.043314808182421,
0.05227083734893167,
0.05143444998736397,
0.06063390625908324,
0.0625,
0.07071067811865475,
0.07580980435789034,
0.08703882797784893,
0.09205746178983235,
0.1,
0.105999788000636,
0.10783277320343841,
0.16012815380508713,
0.1643989873053573,
0.2,
0.20851441405707477,
0.22360679774997896,
0.25,
0.2672612419124244,
0.3333333333333333,
0.2886751345948129,
0.35355339059327373,
0.7071067811865475,
0.7071067811865475,
0.4082482904638631,
1.0,
0.0,
1.0,
0.5773502691896258,
]
def test_boost_1d():
boost_histogram = pytest.importorskip("boost_histogram")
with uproot.open(skhep_testdata.data_path("uproot-hepdata-example.root")) as f:
f["hpx"].to_boost()
def test_boost_2d():
boost_histogram = pytest.importorskip("boost_histogram")
with uproot.open(skhep_testdata.data_path("uproot-hepdata-example.root")) as f:
f["hpxpy"].to_boost()
def test_hist_1d():
hist = pytest.importorskip("hist")
with uproot.open(skhep_testdata.data_path("uproot-hepdata-example.root")) as f:
f["hpx"].to_hist()
def test_hist_2d():
hist = pytest.importorskip("hist")
with uproot.open(skhep_testdata.data_path("uproot-hepdata-example.root")) as f:
f["hpxpy"].to_hist()
| 23.612654
| 241
| 0.265012
| 5,652
| 67,178
| 3.140481
| 0.145435
| 0.160901
| 0.218366
| 0.261634
| 0.577239
| 0.539211
| 0.539099
| 0.519831
| 0.510761
| 0.502592
| 0
| 0.679195
| 0.645911
| 67,178
| 2,844
| 242
| 23.620956
| 0.067011
| 0.004719
| 0
| 0.805832
| 0
| 0
| 0.003949
| 0.002827
| 0
| 0
| 0
| 0
| 0.003912
| 1
| 0.002489
| false
| 0
| 0.003201
| 0
| 0.00569
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ab132bd86eea337e261f2d94fe53829ba56df939
| 958
|
py
|
Python
|
cowsay/lib/cows/glados.py
|
Ovlic/cowsay_py
|
1ee8d11d6d895d7695d57e26003d71ce18379d3b
|
[
"MIT"
] | null | null | null |
cowsay/lib/cows/glados.py
|
Ovlic/cowsay_py
|
1ee8d11d6d895d7695d57e26003d71ce18379d3b
|
[
"MIT"
] | null | null | null |
cowsay/lib/cows/glados.py
|
Ovlic/cowsay_py
|
1ee8d11d6d895d7695d57e26003d71ce18379d3b
|
[
"MIT"
] | null | null | null |
def Glados(thoughts, eyes, eye, tongue):
return f"""
{thoughts}
{thoughts}
\#+ \@ \# \# M#\@
. .X X.%##\@;# \# +\@#######X. \@#%
,==. ,######M+ -#####%M####M- \#
:H##M%:=##+ .M##M,;#####/+#######% ,M#
.M########= =\@#\@.=#####M=M#######= X#
:\@\@MMM##M. -##M.,#######M#######. = M
\@##..###:. .H####. \@\@ X,
\############: \###,/####; /##= \@#. M
,M## ;##,\@#M;/M#M \@# X#% X#
.%= \######M## \##.M#: ./#M ,M \#M ,#\$
\##/ \$## \#+;#: \#### ;#/ M M- \@# :
\#+ \#M\@MM###M-;M \#:\$#-##\$H# .#X \@ + \$#. \#
\######/.: \#%=# M#:MM./#.-# \@#: H#
+,.= \@###: /\@ %#,\@ \##\@X \#,-#\@.##% .\@#
\#####+;/##/ \@## \@#,+ /#M . X,
;###M#\@ M###H .#M- ,##M ;\@\@; \###
.M#M##H ;####X ,\@#######M/ -M###\$ -H
.M###% X####H .\@\@MM\@; ;\@#M\@
H#M /\@####/ ,++. / ==-,
,=/:, .+X\@MMH\@#H \#####\$=
"""
| 38.32
| 49
| 0.136743
| 81
| 958
| 1.617284
| 0.17284
| 0.427481
| 0.435115
| 0.396947
| 0.412214
| 0.328244
| 0.282443
| 0.122137
| 0
| 0
| 0
| 0
| 0.295407
| 958
| 25
| 50
| 38.32
| 0.194074
| 0
| 0
| 0.08
| 0
| 0
| 0.93952
| 0.070907
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0
| 0
| 0.04
| 0.08
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ab27c5cfd1e89a0a65c6c608c191e8a107d3fab8
| 38
|
py
|
Python
|
main.py
|
risker93/king
|
b7cac65595960a81236cb0d9d004d4f3ffe1edf0
|
[
"Apache-2.0"
] | 2
|
2021-05-02T12:23:27.000Z
|
2021-05-02T12:56:25.000Z
|
main.py
|
risker93/king
|
b7cac65595960a81236cb0d9d004d4f3ffe1edf0
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
risker93/king
|
b7cac65595960a81236cb0d9d004d4f3ffe1edf0
|
[
"Apache-2.0"
] | null | null | null |
print("I'm the king of the world!!")
| 12.666667
| 36
| 0.631579
| 8
| 38
| 3
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184211
| 38
| 2
| 37
| 19
| 0.774194
| 0
| 0
| 0
| 0
| 0
| 0.72973
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
db9f50f7f6a511f59f5eeb13061ff69713fcf6cf
| 32,339
|
py
|
Python
|
autodiff/ad.py
|
sandrawing/cs107-FinalProject
|
f8f884d22f55c3b47d9524621bdcab41d69b2690
|
[
"MIT"
] | null | null | null |
autodiff/ad.py
|
sandrawing/cs107-FinalProject
|
f8f884d22f55c3b47d9524621bdcab41d69b2690
|
[
"MIT"
] | null | null | null |
autodiff/ad.py
|
sandrawing/cs107-FinalProject
|
f8f884d22f55c3b47d9524621bdcab41d69b2690
|
[
"MIT"
] | null | null | null |
import numpy as np
class AutoDiff():
"""
Forward Mode Implementation of Automatic Differentiation
The class overloads the basic operations, including the unary operation,
and contains some elemental functions
"""
def __init__(self, val, der=1, name="not_specified"):
"""
constructor for AutoDiff class
        Initializes an AutoDiff object with the value, derivative and name that were passed in,
        converts the value to a numpy array to handle multiple values,
        and converts the derivatives to a dictionary to handle multiple variables
INPUT
=======
val: value of the current variable
der: derivative of the current variable
name: name of the current variable
RETURNS
=======
AutoDiff object: self.val, self.der, and self.name
Example:
>>> x = AutoDiff([5,6], [1, 7], "x")
>>> print(x.val, x.der, x.name)
[5 6] {'x': array([1, 7])} x
"""
# Handle several input types of val, including float, int, list and np.ndarray
if isinstance(val, (float, int, np.int32, np.int64, np.float64)):
val = [val]
self.val = np.array(val)
elif isinstance(val, list):
self.val = np.array(val)
elif isinstance(val, np.ndarray):
self.val = val
else:
raise TypeError("Invalid Type for val! ")
        # Handle several input types of der, including dict, list, float and int
if type(der) == dict:
self.der = der
elif type(der) == list:
self.der = {name: np.array(der)}
elif isinstance(der, (float, int, np.int64, np.float64)):
self.der = {name: np.array([der] * len(self.val))}
self.name = name
def get_variables(self):
"""
INPUT
=======
None
RETURNS
=======
set of variable names
Example:
>>> x = AutoDiff([5,6], [1, 7], "x")
>>> x.get_variables()
{'x'}
"""
return set(self.der.keys())
"""Basic Operations"""
def __add__(self, other):
"""
Overloads the addition operation
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
AutoDiff object: A new AutoDiff object which is the result of the addition operation
performed between the AutoDiff object and the argument that was passed in
EXAMPLES
=======
>>> x = AutoDiff(5, 10, "x")
>>> f1 = x + 100
>>> print(f1.val, f1.der)
[105.] {'x': array([10])}
>>> x = AutoDiff([8, 4], [10, 11], 'x')
>>> y = AutoDiff([9, 12], [20, 33], 'y')
>>> f1 = x + y
>>> print(f1.val, f1.der["x"], f1.der["y"])
[17 16] [10 11] [20 33]
"""
temp_der = {}
if isinstance(other, (int, float)):
            # Add a scalar to an AutoDiff object
return AutoDiff(self.val + float(other), self.der.copy(), self.name)
elif isinstance(other, AutoDiff):
# Add two AutoDiff objects
var_union = self.get_variables().union(other.get_variables())
temp_val = self.val + other.val
for variable in var_union:
temp_der[variable] = self.der.get(variable, 0) + other.der.get(variable, 0)
return AutoDiff(temp_val, temp_der, self.name)
else:
raise TypeError("Invalid input type!")
def __radd__(self, other):
"""
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
AutoDiff object: A new AutoDiff object which is the result of the addition operation
performed between the argument that was passed in and the AutoDiff object
EXAMPLES
=======
>>> x = AutoDiff(5, 10, "x")
>>> f1 = 100 + x
>>> print(f1.val, f1.der)
[105.] {'x': array([10])}
>>> x = AutoDiff([8, 4], [10, 11], 'x')
>>> y = AutoDiff([9, 12], [20, 33], 'y')
>>> f1 = y + x
>>> print(f1.val, f1.der["x"], f1.der["y"])
[17 16] [10 11] [20 33]
"""
return self.__add__(other)
def __mul__(self, other):
"""
Overloads the multiplication operation
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
        AutoDiff object: A new AutoDiff object which is the result of the multiplication operation
performed between the argument that was passed in and the AutoDiff object
EXAMPLES
=======
>>> x = AutoDiff(5, name="x")
>>> f1 = 100 * x
>>> print(f1.val, f1.der)
[500.] {'x': array([100])}
>>> x = AutoDiff([8, 4], name='x')
>>> y = AutoDiff([9, 12], name='y')
>>> f1 = y * x
>>> print(f1.val, f1.der["x"], f1.der["y"])
[72 48] [ 9 12] [8 4]
"""
temp_der = {}
if isinstance(other, (int, float)):
            # Multiply an AutoDiff object by a scalar
for variable in self.get_variables():
temp_der[variable] = self.der[variable] * other
return AutoDiff(self.val * float(other), temp_der, self.name)
elif isinstance(other, AutoDiff):
# Multiply two AutoDiff objects
var_union = self.get_variables().union(other.get_variables())
for variable in var_union:
temp_der[variable] = self.val * other.der.get(variable, 0) + other.val * self.der.get(variable, 0)
return AutoDiff(self.val * other.val, temp_der, self.name)
else:
raise TypeError("Invalid input type!")
def __rmul__(self, other):
"""
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
AutoDiff object: A new AutoDiff object which is the result of the multiplication operation
performed between the AutoDiff object and the argument that was passed in
EXAMPLES
=======
>>> x = AutoDiff(5, name="x")
>>> f1 = x * 5
>>> print(f1.val, f1.der)
[25.] {'x': array([5])}
>>> x = AutoDiff(5, name="x")
>>> y = AutoDiff(2, name="y")
>>> result = x * y
>>> print(result.val, result.der["x"], result.der["y"])
[10] [2] [5]
"""
return self.__mul__(other)
def __sub__(self, other):
"""
Overloads the subtraction operation
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
AutoDiff object: A new AutoDiff object which is the result of the subtraction operation
performed between the AutoDiff object and the argument that was passed in
EXAMPLES
=======
>>> x = AutoDiff(5, name="x")
>>> f1 = x - 100
>>> print(f1.val, f1.der)
[-95.] {'x': array([1])}
>>> x = AutoDiff([8, 4], name='x')
>>> y = AutoDiff([9, 12], name="y")
>>> result = x - y
>>> print(result.val, result.der["x"], result.der["y"])
[-1 -8] [1 1] [-1 -1]
"""
temp_der = {}
if isinstance(other, (int, float)):
            # Subtract a scalar from an AutoDiff object
return AutoDiff(self.val - float(other), self.der.copy(), self.name)
elif isinstance(other, AutoDiff):
# Subtract two AutoDiff objects
var_union = self.get_variables().union(other.get_variables())
temp_val = self.val - other.val
for variable in var_union:
temp_der[variable] = self.der.get(variable, 0) - other.der.get(variable, 0)
return AutoDiff(temp_val, temp_der, self.name)
else:
raise TypeError("Invalid input type!")
def __rsub__(self, other):
"""
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
AutoDiff object: A new AutoDiff object which is the result of the subtraction operation
performed between the AutoDiff object and the argument that was passed in
EXAMPLES
=======
>>> x = AutoDiff(5, name="x")
>>> f1 = 100 - x
>>> print(f1.val, f1.der)
[95.] {'x': array([-1])}
>>> x = AutoDiff([8, 4], name='x')
>>> y = AutoDiff([9, 12], name="y")
>>> result = y - x
>>> print(result.val, result.der["x"], result.der["y"])
[1 8] [-1 -1] [1 1]
"""
return -self + other
def __pow__(self, other):
"""
Overloads the power operation
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
AutoDiff object: A new AutoDiff object which is the result of the AutoDiff object being
raised to the power of the argument that was passed in
EXAMPLES
=======
>>> x = AutoDiff(2, name="x")
>>> f1 = x ** 2
>>> print(f1.val, f1.der)
[4.] {'x': array([4.])}
>>> x = AutoDiff([3, 2], name='x')
>>> y = AutoDiff([-2, 5], name='y')
>>> result = (x ** y)
>>> print(result.val, result.der["x"], result.der["y"])
[ 0.11111111 32. ] [-7.40740741e-02 8.00000000e+01] [ 0.12206803 22.18070978]
"""
temp_der = {}
if isinstance(other, (int, float)):
            # An AutoDiff object raised to a scalar power
temp_val = np.array([float(v) ** other for v in self.val])
for variable in self.get_variables():
curr_val = np.array([float(v) ** (other - 1) for v in self.val])
temp_der[variable] = other * np.array(curr_val) * self.der[variable]
return AutoDiff(temp_val, temp_der, self.name)
elif isinstance(other, AutoDiff):
            # An AutoDiff object raised to the power of another AutoDiff object
if len(other.val) == 1:
other_val = other.val * np.ones(self.val.shape)
elif len(other.val) != len(self.val):
raise ValueError("You must have two vectors of the same length to use power on both.")
else:
other_val = other.val[:]
var_union = self.get_variables().union(other.get_variables())
temp_val = np.array([float(v) ** (o) for v, o in zip(self.val, other_val)])
for variable in var_union:
curr_val = np.array([float(v) ** (o - 1) for v, o in zip(self.val, other_val)])
temp_der[variable] = curr_val * (other_val * self.der.get(variable, 0) +
self.val * np.log(self.val) * other.der.get(variable, 0))
return AutoDiff(temp_val, temp_der, self.name)
else:
raise TypeError("Invalid input type!")
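    # Clarifying note on the derivative used above: for u = self.val and
    # v = other.val, the code applies d(u**v) = u**(v-1) * (v*u' + u*ln(u)*v'),
    # which reduces to the familiar power rule v*u**(v-1)*u' when v is constant.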
def __rpow__(self, other):
"""
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
AutoDiff object: A new AutoDiff object which is the result of the argument that was
passed in being raised to the power of the AutoDiff object
EXAMPLES
=======
>>> x = AutoDiff(2, name="x")
>>> f1 = 2 ** x
>>> print(f1.val, f1.der)
[4.] {'x': array([2.77258872])}
>>> x = AutoDiff([-3, 2], name='x')
>>> y = AutoDiff([2, 5], name='y')
>>> result = (x.__rpow__(y))
>>> print(result.val, result.der["x"], result.der["y"])
[ 0.125 25. ] [ 0.0866434 40.23594781] [-0.1875 10. ]
"""
temp_der = {}
if isinstance(other, (int, float)):
            # A scalar raised to the power of an AutoDiff object
temp_val = np.array([other ** float(v) for v in self.val])
for variable in self.get_variables():
curr_val = np.array([other ** float(v) for v in self.val])
temp_der[variable] = np.log(other) * curr_val * self.der[variable]
return AutoDiff(temp_val, temp_der, self.name)
elif isinstance(other, AutoDiff):
if len(other.val) == 1:
other_val = other.val * np.ones(self.val.shape)
elif len(other.val) != len(self.val):
raise ValueError("You must have two vectors of the same length to use power on both.")
else:
other_val = other.val[:]
var_union = self.get_variables().union(other.get_variables())
temp_val = np.array([float(o) ** float(v) for v, o in zip(self.val, other_val)])
for variable in var_union:
curr_val = np.array([float(o) ** (float(v) - 1) for v, o in zip(self.val, other_val)])
temp_der[variable] = curr_val * (other_val * self.der.get(variable, 0) * np.log(other_val) +
self.val * other.der.get(variable, 0))
return AutoDiff(temp_val, temp_der, self.name)
else:
raise TypeError("Invalid input type!")
def __truediv__(self, other):
"""
Overloads the division operation
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
AutoDiff object: A new AutoDiff object which is the result of the AutoDiff
object divided by the argument that was passed in
EXAMPLES
=======
>>> x = AutoDiff(2, name="x")
>>> f1 = x / 2
>>> print(f1.val, f1.der)
[1.] {'x': array([0.5])}
>>> x = AutoDiff([16, 0], name="x")
>>> y = AutoDiff([8, -1], name="y")
>>> result = (x/y)
>>> print(result.val, result.der["x"], result.der["y"])
[ 2. -0.] [ 0.125 -1. ] [-0.25 -0. ]
"""
return self * (other ** (-1))
def __rtruediv__(self, other):
"""
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
        AutoDiff object: A new AutoDiff object which is the result of the argument
        that was passed in divided by the AutoDiff object
EXAMPLES
=======
>>> x = AutoDiff(2, name="x")
>>> f1 = 2 / x
>>> print(f1.val, f1.der)
[1.] {'x': array([-0.5])}
>>> x = AutoDiff([16, 2], name="x")
>>> y = AutoDiff([8, -1], name="y")
>>> result = y / x
>>> print(result.val, result.der["x"], result.der["y"])
[ 0.5 -0.5] [-0.03125 0.25 ] [0.0625 0.5 ]
"""
return other * (self ** (-1))
def __neg__(self):
"""
INPUT
=======
        None
RETURNS
=======
AutoDiff object: A new AutoDiff object which has the signs of
the value and derivative reversed
EXAMPLES
=======
>>> x = AutoDiff(2, name="x")
>>> f1 = -x
>>> print(f1.val, f1.der)
[-2] {'x': array([-1])}
"""
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = -self.der.get(variable, 0)
return AutoDiff(-self.val, temp_der, self.name)
def __eq__(self, other):
"""
        Overloads the equal comparison operator (==)
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
If the input is scalar:
True if the length of val of self AutoDiff instance is 1 and
the value of element in self.val is same as other; False if not
If the input is AutoDiff Instance:
True if self and other AutoDiff instance have the
same values and same length of values; False if not
EXAMPLES
=======
>>> x = AutoDiff(2.0, name="x")
>>> y = 2
>>> print(x==y)
True
>>> x = AutoDiff([2.0, 4.0], name="x")
>>> y = AutoDiff([2.0, 4.0], name="y")
>>> print(x==y)
True
"""
if isinstance(other, (int, float)):
return np.array_equal(self.val, np.array([float(other)]))
elif isinstance(other, AutoDiff):
return np.array_equal(self.val, other.val)
def __ne__(self, other):
"""
        Overloads the not equal comparison operator (!=)
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
If the input is scalar:
True if the length of val of self AutoDiff instance is not 1 or
the value of element in self.val is different from other; False if not
If the input is AutoDiff Instance:
True if self and other AutoDiff instance have different
values or different length of values; False if not
EXAMPLES
=======
>>> x = AutoDiff(2.0, name="x")
>>> y = 3
>>> print(x!=y)
True
>>> x = AutoDiff([2.0, 4.0], name="x")
>>> y = AutoDiff([2.0], name="y")
>>> print(x!=y)
True
"""
if isinstance(other, (int, float)):
return not np.array_equal(self.val, np.array([float(other)]))
elif isinstance(other, AutoDiff):
return not np.array_equal(self.val, other.val)
def __lt__(self, other):
"""
        Overloads the less than comparison operator (<)
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
Return the truth value of values (x1 < x2) element-wise
EXAMPLES
=======
>>> x = AutoDiff(2.0, name="x")
>>> y = 3
>>> print(x<y)
[ True]
>>> x = AutoDiff([2.0, 4.0], name="x")
>>> y = AutoDiff([2.0, 5.0], name="y")
>>> print(x<y)
[False True]
"""
if isinstance(other, (int, float)):
if len(self.val) != 1:
raise TypeError("Please compare the variables with same number of values!")
return np.less(self.val, np.array([float(other)]))
elif isinstance(other, AutoDiff):
if len(self.val) != len(other.val):
raise TypeError("Please compare the variables with same number of values!")
return np.less(self.val, other.val)
def __le__(self, other):
"""
        Overloads the less than or equal to comparison operator (<=)
INPUT
=======
other: Scalar or AutoDiff Object
RETURNS
=======
Return the truth value of values (x1 <= x2) element-wise
EXAMPLES
=======
>>> x = AutoDiff(2.0, name="x")
>>> y = 3
>>> print(x<=y)
[ True]
>>> x = AutoDiff([2.0, 4.0], name="x")
>>> y = AutoDiff([2.0, 5.0], name="y")
>>> print(x<=y)
[ True True]
"""
if isinstance(other, (int, float)):
if len(self.val) != 1:
raise TypeError("Please compare the variables with same number of values!")
return np.less_equal(self.val, np.array([float(other)]))
elif isinstance(other, AutoDiff):
if len(self.val) != len(other.val):
raise TypeError("Please compare the variables with same number of values!")
return np.less_equal(self.val, other.val)
def __gt__(self, other):
"""
        Overloads the greater than comparison operator (>)
Inputs
=======
Scalar or AutoDiff Instance
Returns
=======
Return the truth value of values (x1 > x2) element-wise
EXAMPLES
=======
>>> x = AutoDiff(2.0, name="x")
>>> y = 3
>>> print(y>x)
[ True]
>>> x = AutoDiff([2.0, 4.0], name="x")
>>> y = AutoDiff([3.0, 5.0], name="y")
>>> print(y>x)
[ True True]
"""
if isinstance(other, (int, float)):
if len(self.val) != 1:
raise TypeError("Please compare the variables with same number of values!")
return np.greater(self.val, np.array([float(other)]))
elif isinstance(other, AutoDiff):
if len(self.val) != len(other.val):
raise TypeError("Please compare the variables with same number of values!")
return np.greater(self.val, other.val)
def __ge__(self, other):
"""
        Overloads the greater than or equal to comparison operator (>=)
Inputs
=======
Scalar or AutoDiff Instance
Returns
=======
Return the truth value of values (x1 >= x2) element-wise
EXAMPLES
=======
>>> x = AutoDiff(2.0, name="x")
>>> y = 1
>>> print(x>=y)
[ True]
>>> x = AutoDiff([2.0, 4.0], name="x")
>>> y = AutoDiff([1.0, 3.0], name="y")
>>> print(x>=y)
[ True True]
"""
if isinstance(other, (int, float)):
if len(self.val) != 1:
raise TypeError("Please compare the variables with same number of values!")
return np.greater_equal(self.val, np.array([float(other)]))
elif isinstance(other, AutoDiff):
if len(self.val) != len(other.val):
raise TypeError("Please compare the variables with same number of values!")
return np.greater_equal(self.val, other.val)
"""Elemental Function"""
def sin(self):
"""
Elementary function sin
Inputs
=======
None
Returns
=======
A new AutoDiff object with the sine
computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(2, name="x")
>>> f1 = AutoDiff.sin(x)
>>> print(f1.val, f1.der)
[0.90929743] {'x': array([-0.41614684])}
"""
temp_der = {}
new_val = np.sin(self.val)
for variable in self.get_variables():
temp_der[variable] = np.cos(self.val) * self.der[variable]
return AutoDiff(new_val, temp_der, self.name)
def sinh(self):
"""
Elementary function sinh
Inputs
=======
None
Returns
=======
A new AutoDiff object with the hyperbolic sine
computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(5.0, 1.0, "x")
>>> f1 = 3 * x + 2
>>> f2 = AutoDiff.sinh(f1)
>>> print(f2.val, f2.der)
[12077476.37678763] {'x': array([36232429.13036301])}
"""
new_val = np.sinh(self.val)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = np.cosh(self.val) * self.der[variable]
return AutoDiff(new_val, temp_der, self.name)
def cos(self):
"""
Elementary function cos
Inputs
=======
None
Returns
=======
A new AutoDiff object with the cosine computation
done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(5.0, 1.0, "x")
>>> f1 = 3 * x + 2
>>> f2 = AutoDiff.cos(f1)
>>> print(f2.val, f2.der)
[-0.27516334] {'x': array([2.88419248])}
"""
new_val = np.cos(self.val)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = -np.sin(self.val) * self.der[variable]
return AutoDiff(new_val, temp_der, self.name)
def cosh(self):
"""
Elementary function cosh
Inputs
=======
None
Returns
=======
A new AutoDiff object with the hyperbolic cosine
computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(5.0, 1.0, "x")
>>> f1 = 3 * x + 2
>>> f2 = AutoDiff.cosh(f1)
>>> print(f2.val, f2.der)
[12077476.37678767] {'x': array([36232429.13036288])}
"""
new_val = np.cosh(self.val)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = np.sinh(self.val) * self.der[variable]
return AutoDiff(new_val, temp_der, self.name)
def tan(self):
"""
Elementary function tan
Inputs
=======
None
Returns
=======
A new AutoDiff object with the tangent computation
done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(5.0, 1.0, "x")
>>> f1 = 3 * x + 2
>>> f2 = AutoDiff.tan(f1)
>>> print(f2.val, f2.der)
[3.49391565] {'x': array([39.62233961])}
"""
new_val = np.tan(self.val)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = self.der[variable] / (np.cos(self.val) ** 2)
return AutoDiff(new_val, temp_der, self.name)
def tanh(self):
"""
Elementary function tanh
Inputs
=======
None
Returns
=======
A new AutoDiff object with the hyperbolic
tangent computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(5.0, 1.0, "x")
>>> f1 = 3 * x + 2
>>> f2 = AutoDiff.tanh(f1)
>>> print(f2.val, f2.der)
[1.] {'x': array([2.05669012e-14])}
"""
new_val = np.tanh(self.val)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = self.der[variable] * 1 / (np.cosh(self.val) ** 2)
return AutoDiff(new_val, temp_der, self.name)
def arcsin(self):
"""
        Elementary function arcsin
Inputs
=======
None
Returns
=======
        A new AutoDiff object with the inverse sine (arcsin)
        computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(0.5, 1.0, "x")
>>> f1 = AutoDiff.arcsin(x)
>>> print(f1.val, f1.der)
[0.52359878] {'x': array([1.15470054])}
"""
new_val = np.arcsin(self.val)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = self.der[variable] * 1 / np.sqrt(1 - self.val ** 2)
return AutoDiff(new_val, temp_der, self.name)
def arccos(self):
"""
Elementary function arccos
Inputs
=======
None
Returns
=======
        A new AutoDiff object with the inverse cosine (arccos)
        computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(0.5, 1.0, "x")
>>> f1 = AutoDiff.arccos(x)
>>> print(f1.val, f1.der)
[1.04719755] {'x': array([-1.15470054])}
"""
new_val = np.arccos(self.val)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = -self.der[variable] * 1 / np.sqrt(1 - self.val ** 2)
return AutoDiff(new_val, temp_der, self.name)
def arctan(self):
"""
Elementary function arctan
Inputs
=======
None
Returns
=======
        A new AutoDiff object with the inverse tangent (arctan)
        computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(0.5, 1.0, "x")
>>> f1 = AutoDiff.arctan(x)
>>> print(f1.val, f1.der)
[0.46364761] {'x': array([0.8])}
"""
new_val = np.arctan(self.val)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = self.der[variable] * 1 / ((self.val ** 2) + 1)
return AutoDiff(new_val, temp_der, self.name)
def sqrt(self):
"""
Elementary function sqrt
Inputs
=======
None
Returns
=======
A new AutoDiff object with the square root
computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(0.5, 1.0, "x")
>>> f1 = AutoDiff.sqrt(x)
>>> print(f1.val, f1.der)
[0.70710678] {'x': array([0.70710678])}
"""
new_val = self.val ** (1 / 2)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = self.der[variable] * ((1 / 2) * (self.val ** (- 1 / 2)))
return AutoDiff(new_val, temp_der, self.name)
def ln(self):
"""
Elementary function ln
Inputs
=======
None
Returns
=======
A new AutoDiff object with the natural log
computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(0.5, 1.0, "x")
>>> f1 = AutoDiff.ln(x)
>>> print(f1.val, f1.der)
[-0.69314718] {'x': array([2.])}
"""
new_val = np.log(self.val)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = self.der[variable] * (1 / self.val)
return AutoDiff(new_val, temp_der, self.name)
def log(self, base):
"""
Elementary function log with a scalar base
Inputs
=======
scalar
Returns
        =======
        A new AutoDiff object with the log (using a specified
base) computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(0.5, 1.0, "x")
>>> f1 = AutoDiff.log(x, 10)
>>> print(f1.val, f1.der)
[-0.30103] {'x': array([0.86858896])}
"""
new_val = np.log(self.val) / np.log(base)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = self.der[variable] * (1 / (self.val * np.log(base)))
return AutoDiff(new_val, temp_der, self.name)
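    # Clarifying note: log() applies the change-of-base rule
    # log_base(x) = ln(x) / ln(base), whose derivative 1 / (x * ln(base))
    # is what multiplies self.der above.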
def exp(self):
"""
Elementary function exp with exponential base
Inputs
=======
None
Returns
=======
A new AutoDiff object with the natural exponential
computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(0.5, 1.0, "x")
>>> f1 = AutoDiff.exp(x)
>>> print(f1.val, f1.der)
[1.64872127] {'x': array([1.64872127])}
"""
new_val = np.exp(self.val)
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = self.der[variable] * np.exp(self.val)
return AutoDiff(new_val, temp_der, self.name)
def exp_base(self, base):
"""
        Elementary function exp with a scalar base
Inputs
=======
scalar
Returns
=======
A new AutoDiff object with the exponential (using a specified base)
computation done on the value and derivative
EXAMPLES
=======
>>> x = AutoDiff(0.5, 1.0, "x")
>>> f1 = AutoDiff.exp_base(x, 10)
>>> print(f1.val, f1.der)
[3.16227766] {'x': array([7.2814134])}
"""
new_val = np.array([base ** float(v) for v in self.val])
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = self.der[variable] * (base ** self.val) * np.log(base)
return AutoDiff(new_val, temp_der, self.name)
def logistic(self):
"""
Logistic function
Inputs
=======
None
Returns
=======
A new AutoDiff object calculated with logistic function
EXAMPLES
=======
>>> x = AutoDiff(0.5, 1.0, "x")
>>> f1 = AutoDiff.logistic(x)
>>> print(f1.val, f1.der)
[0.62245933] {'x': array([0.23500371])}
"""
new_val = 1 / (1 + np.exp(-self.val))
temp_der = {}
for variable in self.get_variables():
temp_der[variable] = self.der[variable] * np.exp(self.val) / ((1 + np.exp(self.val)) ** 2)
return AutoDiff(new_val, temp_der, self.name)
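A minimal usage sketch of the class above (hypothetical inputs; the expressions rely only on the operators and elemental functions defined in this file):
x = AutoDiff([1.0, 2.0], name="x")
y = AutoDiff([3.0, 4.0], name="y")
f = AutoDiff.sin(x) * y + x ** 2   # one forward pass builds value and partials
print(f.val)                       # element-wise values of sin(x)*y + x**2
print(f.der["x"], f.der["y"])      # df/dx = cos(x)*y + 2x, df/dy = sin(x)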
| 30.916826
| 114
| 0.506045
| 3,916
| 32,339
| 4.111083
| 0.07048
| 0.035654
| 0.024225
| 0.030188
| 0.82061
| 0.810423
| 0.780173
| 0.743028
| 0.721225
| 0.697186
| 0
| 0.042335
| 0.348465
| 32,339
| 1,045
| 115
| 30.946411
| 0.721737
| 0.443675
| 0
| 0.521401
| 0
| 0
| 0.054678
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.132296
| false
| 0
| 0.003891
| 0
| 0.311284
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
916c240b18480df1840829668d990f3cb3a7efae
| 12,140
|
py
|
Python
|
TextCNN/Module/trick.py
|
enternityFan/BJTUNLP_Practice
|
9bcd2a0a08a10164d0afc13f2a4ceeea4c87eedf
|
[
"Apache-2.0"
] | null | null | null |
TextCNN/Module/trick.py
|
enternityFan/BJTUNLP_Practice
|
9bcd2a0a08a10164d0afc13f2a4ceeea4c87eedf
|
[
"Apache-2.0"
] | null | null | null |
TextCNN/Module/trick.py
|
enternityFan/BJTUNLP_Practice
|
9bcd2a0a08a10164d0afc13f2a4ceeea4c87eedf
|
[
"Apache-2.0"
] | null | null | null |
# @Time : 2022-02-23 15:33
# @Author : Phalange
# @File : trick.py
# @Software: PyCharm
# C'est la vie,enjoy it! :D
import math
import torch
from torch import nn
from torch.optim import lr_scheduler
from d2l import torch as d2l
from tqdm import *
class CosineScheduler:
def __init__(self, max_update, base_lr=0.01, final_lr=0,
warmup_steps=0, warmup_begin_lr=0):
self.base_lr_orig = base_lr
self.max_update = max_update
self.final_lr = final_lr
self.warmup_steps = warmup_steps
self.warmup_begin_lr = warmup_begin_lr
self.max_steps = self.max_update - self.warmup_steps
def get_warmup_lr(self, epoch):
increase = (self.base_lr_orig - self.warmup_begin_lr) \
* float(epoch) / float(self.warmup_steps)
return self.warmup_begin_lr + increase
def __call__(self, epoch):
if epoch < self.warmup_steps:
return self.get_warmup_lr(epoch)
if epoch <= self.max_update:
self.base_lr = self.final_lr + (
self.base_lr_orig - self.final_lr) * (1 + math.cos(
math.pi * (epoch - self.warmup_steps) / self.max_steps)) / 2
return self.base_lr
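# A brief sketch of the schedule above (hypothetical values): the learning rate
# rises linearly from warmup_begin_lr to base_lr over warmup_steps epochs, then
# decays along lr(t) = final_lr + (base_lr - final_lr)
#                      * (1 + cos(pi * (t - warmup_steps) / max_steps)) / 2.
# Note that __call__ falls through and returns None once epoch > max_update.
#   sched = CosineScheduler(max_update=30, base_lr=0.1, final_lr=0.001, warmup_steps=5)
#   lrs = [sched(t) for t in range(30)]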
def train_scheduler(net, train_iter, test_iter, loss, trainer, num_epochs,
devices=d2l.try_all_gpus(),scheduler=None):
"""Train a model with mutiple GPUs (defined in Chapter 13).
Defined in :numref:`sec_image_augmentation`"""
timer, num_batches = d2l.Timer(), len(train_iter)
animator = d2l.Animator(xlabel='epoch', xlim=[1, num_epochs], ylim=[0, 1],
legend=['train loss', 'train acc', 'test acc'])
net = nn.DataParallel(net, device_ids=devices).to(devices[0])
for epoch in tqdm(range(num_epochs)):
# Sum of training loss, sum of training accuracy, no. of examples,
# no. of predictions
metric = d2l.Accumulator(4)
for i, (features, labels) in enumerate(train_iter):
timer.start()
l, acc = train_batch(
net, features, labels, loss, trainer, devices)
metric.add(l, acc, labels.shape[0], labels.numel())
timer.stop()
if (i + 1) % (num_batches // 5) == 0 or i == num_batches - 1:
animator.add(epoch + (i + 1) / num_batches,
(metric[0] / metric[2], metric[1] / metric[3],
None))
test_acc = d2l.evaluate_accuracy_gpu(net, test_iter)
animator.add(epoch + 1, (None, None, test_acc))
if scheduler:
if scheduler.__module__ == lr_scheduler.__name__:
                # Using PyTorch's built-in scheduler
scheduler.step()
else:
                # Using a custom-defined scheduler
for param_group in trainer.param_groups:
param_group['lr'] = scheduler(epoch)
        print(f'epoch {epoch:d} finished, loss: {metric[0] / metric[2]:.3f}, train acc '
f'{metric[1] / metric[3]:.3f}')
if epoch % 10 == 0 and epoch !=0:
print("save the" + str(epoch) +"times weight..")
torch.save(net.state_dict(), './Cache/AttentionWeights'+str(epoch) + '.pth')
print(f'loss {metric[0] / metric[2]:.3f}, train acc '
f'{metric[1] / metric[3]:.3f}, test acc {test_acc:.3f}')
print(f'{metric[2] * num_epochs / timer.sum():.1f} examples/sec on '
f'{str(devices)}')
def train_batch(net, X, y, loss, trainer, devices):
"""Train for a minibatch with mutiple GPUs (defined in Chapter 13).
Defined in :numref:`sec_image_augmentation`"""
if isinstance(X, list):
# Required for BERT fine-tuning (to be covered later)
X = [x.to(devices[0]) for x in X]
else:
X = X.to(devices[0])
y = y.to(devices[0]).long()
net.train()
trainer.zero_grad()
pred = net(X)
l = loss(pred, y)
l.sum().backward()
trainer.step()
train_loss_sum = l.sum()
train_acc_sum = d2l.accuracy(pred, y)
return train_loss_sum, train_acc_sum
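# train_batch follows the standard PyTorch step: zero the gradients, run the
# forward pass, compute the loss, backpropagate, and apply the optimizer step;
# it returns the summed loss and accuracy so the caller can accumulate metrics.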
def train_rnn_scheduler(net, train_iter, test_iter, loss, trainer, num_epochs,
devices=d2l.try_all_gpus(),scheduler=None):
"""Train a model with mutiple GPUs (defined in Chapter 13).
Defined in :numref:`sec_image_augmentation`"""
timer, num_batches = d2l.Timer(), len(train_iter)
animator = d2l.Animator(xlabel='epoch', xlim=[1, num_epochs], ylim=[0, 1],
legend=['train loss', 'train acc', 'test acc'])
net = nn.DataParallel(net, device_ids=devices).to(devices[0])
for epoch in range(num_epochs):
# Sum of training loss, sum of training accuracy, no. of examples,
# no. of predictions
metric = d2l.Accumulator(4)
for i, (features, labels) in enumerate(train_iter):
timer.start()
l, acc = train_batch_rnn(
net, features, labels, loss, trainer, devices)
metric.add(l, acc, labels.shape[0], labels.numel())
timer.stop()
if (i + 1) % (num_batches // 5) == 0 or i == num_batches - 1:
animator.add(epoch + (i + 1) / num_batches,
(metric[0] / metric[2], metric[1] / metric[3],
None))
test_acc = evaluate_accuracy_gpu_rnn(net, test_iter)
animator.add(epoch + 1, (None, None, test_acc))
if scheduler:
if scheduler.__module__ == lr_scheduler.__name__:
                # Using PyTorch's built-in scheduler
scheduler.step()
else:
                # Using a custom-defined scheduler
for param_group in trainer.param_groups:
param_group['lr'] = scheduler(epoch)
        print(f'epoch {epoch:d} finished, loss: {metric[0] / metric[2]:.3f}, train acc '
f'{metric[1] / metric[3]:.3f}')
print(f'loss {metric[0] / metric[2]:.3f}, train acc '
f'{metric[1] / metric[3]:.3f}, test acc {test_acc:.3f}')
print(f'{metric[2] * num_epochs / timer.sum():.1f} examples/sec on '
f'{str(devices)}')
def train_batch_rnn(net, X, y, loss, trainer, devices):
"""Train for a minibatch with mutiple GPUs (defined in Chapter 13).
Defined in :numref:`sec_image_augmentation`"""
state,timer = None,d2l.Timer()
if isinstance(X, list):
# Required for BERT fine-tuning (to be covered later)
X = [x.to(devices[0]) for x in X]
else:
X = X.to(devices[0])
y = y.to(devices[0])
net.train()
trainer.zero_grad()
pred = net(X,state)
l = loss(pred, y)
l.sum().backward()
grad_clipping(net, 1)
trainer.step()
train_loss_sum = l.sum()
train_acc_sum = d2l.accuracy(pred, y)
return train_loss_sum, train_acc_sum
def grad_clipping(net, theta): #@save
"""裁剪梯度"""
if isinstance(net, nn.Module):
params = [p for p in net.parameters() if p.requires_grad]
else:
params = net.params
norm = torch.sqrt(sum(torch.sum((p.grad ** 2)) for p in params))
if norm > theta:
for param in params:
param.grad[:] *= theta / norm
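# grad_clipping rescales every parameter gradient by theta / norm whenever the
# global L2 norm sqrt(sum_p ||p.grad||^2) exceeds theta, so the clipped global
# norm equals exactly theta. It is called between backward() and the optimizer
# step, as in train_batch_rnn above: grad_clipping(net, 1).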
def evaluate_accuracy_gpu_rnn(net, data_iter, device=None):
"""Compute the accuracy for a model on a dataset using a GPU.
Defined in :numref:`sec_lenet`"""
state = None
if isinstance(net, nn.Module):
net.eval() # Set the model to evaluation mode
if not device:
device = next(iter(net.parameters())).device
# No. of correct predictions, no. of predictions
metric = d2l.Accumulator(2)
with torch.no_grad():
for X, y in data_iter:
if isinstance(X, list):
# Required for BERT Fine-tuning (to be covered later)
X = [x.to(device) for x in X]
else:
X = X.to(device)
y = y.to(device)
metric.add(d2l.accuracy(net(X,state), y), d2l.size(y))
return metric[0] / metric[1]
def train_transformer_scheduler(net, train_iter, test_iter, loss, trainer, num_epochs,
devices=d2l.try_all_gpus(),scheduler=None):
"""Train a model with mutiple GPUs (defined in Chapter 13).
Defined in :numref:`sec_image_augmentation`"""
def xavier_init_weights(m):
if type(m) == nn.Linear:
nn.init.xavier_uniform_(m.weight)
if type(m) == nn.GRU:
for param in m._flat_weights_names:
if "weight" in param:
nn.init.xavier_uniform_(m._parameters[param])
timer, num_batches = d2l.Timer(), len(train_iter)
animator = d2l.Animator(xlabel='epoch', xlim=[1, num_epochs], ylim=[0, 1],
legend=['train loss', 'train acc', 'test acc'])
net.apply(xavier_init_weights)
net = nn.DataParallel(net, device_ids=devices).to(devices[0])
for epoch in range(num_epochs):
# Sum of training loss, sum of training accuracy, no. of examples,
# no. of predictions
metric = d2l.Accumulator(4)
for i, (features, labels) in enumerate(train_iter):
timer.start()
l, acc = train_batch_transformer(
net, features, labels, loss, trainer, devices)
metric.add(l, acc, labels.shape[0], labels.numel())
timer.stop()
if (i + 1) % (num_batches // 5) == 0 or i == num_batches - 1:
animator.add(epoch + (i + 1) / num_batches,
(metric[0] / metric[2], metric[1] / metric[3],
None))
test_acc = evaluate_accuracy_gpu_transformer(net, test_iter)
animator.add(epoch + 1, (None, None, test_acc))
if scheduler:
if scheduler.__module__ == lr_scheduler.__name__:
                # Using PyTorch's built-in scheduler
scheduler.step()
else:
                # Using a custom-defined scheduler
for param_group in trainer.param_groups:
param_group['lr'] = scheduler(epoch)
        print(f'epoch {epoch:d} finished, loss: {metric[0] / metric[2]:.3f}, train acc '
f'{metric[1] / metric[3]:.3f}')
print(f'loss {metric[0] / metric[2]:.3f}, train acc '
f'{metric[1] / metric[3]:.3f}, test acc {test_acc:.3f}')
print(f'{metric[2] * num_epochs / timer.sum():.1f} examples/sec on '
f'{str(devices)}')
def train_batch_transformer(net, X, y, loss, trainer, devices):
"""Train for a minibatch with mutiple GPUs (defined in Chapter 13).
Defined in :numref:`sec_image_augmentation`"""
if isinstance(X, list):
# Required for BERT fine-tuning (to be covered later)
X = [x.to(devices[0]) for x in X]
else:
X = X.to(devices[0])
y = y.to(devices[0])
net.train()
trainer.zero_grad()
pred = net(X)
l = loss(pred, y)
l.sum().backward()
#grad_clipping(net, 1)
trainer.step()
train_loss_sum = l.sum()
train_acc_sum = d2l.accuracy(pred, y)
return train_loss_sum, train_acc_sum
def evaluate_accuracy_gpu_transformer(net, data_iter, device=None):
"""Compute the accuracy for a model on a dataset using a GPU.
Defined in :numref:`sec_lenet`"""
state = None
if isinstance(net, nn.Module):
net.eval() # Set the model to evaluation mode
if not device:
device = next(iter(net.parameters())).device
# No. of correct predictions, no. of predictions
metric = d2l.Accumulator(2)
with torch.no_grad():
for X, y in data_iter:
if isinstance(X, list):
# Required for BERT Fine-tuning (to be covered later)
X = [x.to(device) for x in X]
else:
X = X.to(device)
y = y.to(device)
metric.add(d2l.accuracy(net(X), y), d2l.size(y))
return metric[0] / metric[1]
if __name__ == "__main__":
lr, num_epochs = 0.3, 30
scheduler = CosineScheduler(max_update=10, base_lr=0.001, final_lr=0.00003)
d2l.plot(torch.arange(num_epochs), [scheduler(t) for t in range(num_epochs)])
#d2l.plt.show()
| 39.673203
| 95
| 0.586161
| 1,646
| 12,140
| 4.170109
| 0.12819
| 0.019668
| 0.017483
| 0.018357
| 0.805216
| 0.773893
| 0.773893
| 0.773893
| 0.773893
| 0.765443
| 0
| 0.021899
| 0.285338
| 12,140
| 306
| 96
| 39.673203
| 0.769249
| 0.149506
| 0
| 0.690265
| 0
| 0.013274
| 0.096673
| 0.002348
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057522
| false
| 0
| 0.026549
| 0
| 0.123894
| 0.044248
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
531adcd78d0c1661d85ef12e4a00e5d9a7596cb3
| 6,773
|
py
|
Python
|
imagecluster/main.py
|
Little-Frog-233/imagecluster
|
fdd68a7d13b039b4c17a8b48eab95cb87556802f
|
[
"BSD-3-Clause"
] | 1
|
2021-12-31T05:01:22.000Z
|
2021-12-31T05:01:22.000Z
|
imagecluster/main.py
|
Little-Frog-233/imagecluster
|
fdd68a7d13b039b4c17a8b48eab95cb87556802f
|
[
"BSD-3-Clause"
] | null | null | null |
imagecluster/main.py
|
Little-Frog-233/imagecluster
|
fdd68a7d13b039b4c17a8b48eab95cb87556802f
|
[
"BSD-3-Clause"
] | null | null | null |
#coding:utf-8
import os
import pandas as pd
import numpy as np
from imagecluster import calc as ic
from imagecluster import common as co
from imagecluster import postproc as pp
from imagecluster.log import log
pj = os.path.join
ic_base_dir = 'imagecluster'
def main_hierarchy(imagedir, sim=0.5, layer='fc2', size=(224,224), links=True, vis=False,
max_csize=None, pca=False, pca_params=dict(n_components=0.9)):
"""Example main app using this library.
Upon first invocation, the image and fingerprint databases are built and
written to disk. Each new invocation loads those and only repeats
* clustering
* creation of links to files in clusters
* visualization (if `vis=True`)
This is good for playing around with the `sim` parameter, for
instance, which only influences clustering.
Parameters
----------
imagedir : str
path to directory with images
sim : float (0..1)
similarity index (see :func:`calc.cluster`)
layer : str
which layer to use as feature vector (see
:func:`calc.get_model`)
size : tuple
input image size (width, height), must match `model`, e.g. (224,224)
links : bool
create dirs with links
vis : bool
plot images in clusters
max_csize : max number of images per cluster for visualization (see
:mod:`~postproc`)
pca : bool
Perform PCA on fingerprints before clustering, using `pca_params`.
pca_params : dict
kwargs to sklearn's PCA
Notes
-----
imagedir : To select only a subset of the images, create an `imagedir` and
symlink your selected images there. In the future, we may add support
for passing a list of files, should the need arise. But then again,
this function is only an example front-end.
"""
logger_hierarchy = log(logger_name='hierarchy').logger
fps_fn = pj(imagedir, ic_base_dir, 'fingerprints.pk')
ias_fn = pj(imagedir, ic_base_dir, 'images.pk')
ias = None
try:
if not os.path.exists(fps_fn):
print("no fingerprints database {} found".format(fps_fn))
logger_hierarchy.info("no fingerprints database {} found".format(fps_fn))
os.makedirs(os.path.dirname(fps_fn), exist_ok=True)
try:
model = ic.get_model(layer=layer)
except Exception as e:
logger_hierarchy.error(e)
if not os.path.exists(ias_fn):
print("create image array database {}".format(ias_fn))
logger_hierarchy.info("create image array database {}".format(ias_fn))
ias = ic.image_arrays(imagedir, size=size)
co.write_pk(ias, ias_fn)
else:
ias = co.read_pk(ias_fn)
print("running all images through NN model ...")
fps = ic.fingerprints(ias, model)
co.write_pk(fps, fps_fn)
else:
print("loading fingerprints database {} ...".format(fps_fn))
fps = co.read_pk(fps_fn)
if pca:
fps = ic.pca(fps, **pca_params)
print("pca dims:", list(fps.values())[0].shape[0])
# Convert each image into a feature vector
# Perform clustering
print("clustering ...")
clusters = ic.cluster(fps, sim)
if links:
pp.make_links(clusters, pj(imagedir, ic_base_dir, 'clusters'))
if vis:
if ias is None:
ias = co.read_pk(ias_fn)
pp.visualize(clusters, ias, max_csize=max_csize)
except Exception as e:
logger_hierarchy.error(e)
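# A minimal usage sketch with a hypothetical image directory. Since `sim`
# only influences clustering, re-running with a different value reuses the
# fingerprint database cached on the first call:
# main_hierarchy('/path/to/images', sim=0.6, links=True, vis=False)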
def main_kmeans(imagedir, n_clusters=5, layer='fc2', size=(224,224), links=True, pca=False, pca_params=dict(n_components=0.9)):
"""Example main app using this library.
Upon first invocation, the image and fingerprint databases are built and
written to disk. Each new invocation loads those and only repeats
* clustering
* creation of links to files in clusters
This is good for playing around with the `n_clusters` parameter, for
instance, which only influences clustering.
Parameters
----------
imagedir : str
path to directory with images
n_clusters : int (1...999)
number of k-means clusters (see :func:`calc.cluster_kmeans`)
layer : str
which layer to use as feature vector (see
:func:`calc.get_model`)
size : tuple
input image size (width, height), must match `model`, e.g. (224,224)
links : bool
create dirs with links
pca : bool
Perform PCA on fingerprints before clustering, using `pca_params`.
pca_params : dict
kwargs to sklearn's PCA
Notes
-----
imagedir : To select only a subset of the images, create an `imagedir` and
symlink your selected images there. In the future, we may add support
for passing a list of files, should the need arise. But then again,
this function is only an example front-end.
"""
fps_fn = pj(imagedir, ic_base_dir, 'fingerprints.pk')
ias_fn = pj(imagedir, ic_base_dir, 'images.pk')
ias = None
logger_kmeans = log(logger_name='kmeans').logger
try:
if not os.path.exists(fps_fn):
print("no fingerprints database {} found".format(fps_fn))
logger_kmeans.info("no fingerprints database {} found".format(fps_fn))
os.makedirs(os.path.dirname(fps_fn), exist_ok=True)
try:
model = ic.get_model(layer=layer)
except Exception as e:
logger_kmeans.error(e)
if not os.path.exists(ias_fn):
logger_kmeans.info("create image array database {}".format(ias_fn))
print("create image array database {}".format(ias_fn))
ias = ic.image_arrays(imagedir, size=size)
co.write_pk(ias, ias_fn)
else:
ias = co.read_pk(ias_fn)
print("running all images through NN model ...")
fps = ic.fingerprints(ias, model)
co.write_pk(fps, fps_fn)
else:
print("loading fingerprints database {} ...".format(fps_fn))
fps = co.read_pk(fps_fn)
if pca:
fps = ic.pca(fps, **pca_params)
print("pca dims:", list(fps.values())[0].shape[0])
logger_kmeans.info("pca dims: " + str(list(fps.values())[0].shape[0]))
# Convert each image into a feature vector
# Perform clustering
print("clustering ...")
logger_kmeans.info("clustering ...")
clusters = ic.cluster_kmeans(fps, n_clusters=n_clusters)
if links:
pp.make_links_v2(clusters, pj(imagedir, ic_base_dir, 'clusters'))
except Exception as e:
logger_kmeans.error(e)
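# A minimal usage sketch with hypothetical arguments:
# main_kmeans('/path/to/images', n_clusters=8, pca=True)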
| 38.050562
| 127
| 0.613908
| 909
| 6,773
| 4.465347
| 0.216722
| 0.019709
| 0.015521
| 0.023651
| 0.809559
| 0.80069
| 0.796748
| 0.779502
| 0.734171
| 0.721606
| 0
| 0.009717
| 0.285841
| 6,773
| 177
| 128
| 38.265537
| 0.82944
| 0.365274
| 0
| 0.693182
| 0
| 0
| 0.141648
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022727
| false
| 0
| 0.079545
| 0
| 0.102273
| 0.204545
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
532525617c88cf9206bddcbe6bb2bde7097edaa6
| 36
|
py
|
Python
|
discord/types/message.py
|
Harukomaze/disnake
|
541f5c9623a02be894cd1015dbb344070700cb87
|
[
"MIT"
] | null | null | null |
discord/types/message.py
|
Harukomaze/disnake
|
541f5c9623a02be894cd1015dbb344070700cb87
|
[
"MIT"
] | null | null | null |
discord/types/message.py
|
Harukomaze/disnake
|
541f5c9623a02be894cd1015dbb344070700cb87
|
[
"MIT"
] | null | null | null |
from disnake.types.message import *
| 18
| 35
| 0.805556
| 5
| 36
| 5.8
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111111
| 36
| 1
| 36
| 36
| 0.90625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
533a5c10796a9a8aaa30d7cd87047655bcb6ff71
| 46
|
py
|
Python
|
__init__.py
|
smwa/multiprocessing_tools
|
11f00f0cc12cf1b23a6e3a9daafaf8c98529a6e7
|
[
"MIT"
] | null | null | null |
__init__.py
|
smwa/multiprocessing_tools
|
11f00f0cc12cf1b23a6e3a9daafaf8c98529a6e7
|
[
"MIT"
] | null | null | null |
__init__.py
|
smwa/multiprocessing_tools
|
11f00f0cc12cf1b23a6e3a9daafaf8c98529a6e7
|
[
"MIT"
] | null | null | null |
from multiprocessing_tools import map, filter
| 23
| 45
| 0.869565
| 6
| 46
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108696
| 46
| 1
| 46
| 46
| 0.95122
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
53526a1a3a4247402490b9972f4604c2d9e53518
| 2,980
|
py
|
Python
|
tests/dsl/test_parser.py
|
kikuchi-m/ceryle
|
1f91a9aaa17c60700d8827158cb69e7220200757
|
[
"MIT"
] | 2
|
2019-10-29T22:50:28.000Z
|
2020-03-25T03:06:48.000Z
|
tests/dsl/test_parser.py
|
kikuchi-m/ceryle
|
1f91a9aaa17c60700d8827158cb69e7220200757
|
[
"MIT"
] | null | null | null |
tests/dsl/test_parser.py
|
kikuchi-m/ceryle
|
1f91a9aaa17c60700d8827158cb69e7220200757
|
[
"MIT"
] | null | null | null |
import pathlib
from ceryle import Command, Task, TaskGroup
from ceryle.dsl.parser import parse_tasks
def test_parse():
raw_tasks = {
'g1': {
'tasks': [{
'run': Command('do some'),
}, {
'run': Command('do some more'),
}],
},
'g2': {
'dependencies': ['g1'],
'tasks': [{
'run': Command('do awesome'),
}],
},
}
tasks = dict([(g.name, g) for g in parse_tasks(raw_tasks, 'context', 'file1.ceryle')])
assert len(tasks) == 2
g1 = tasks['g1']
assert isinstance(g1, TaskGroup)
assert g1.name == 'g1'
assert g1.context == pathlib.Path('context')
assert g1.dependencies == []
assert g1.filename == 'file1.ceryle'
assert len(g1.tasks) == 2
assert isinstance(g1.tasks[0], Task)
assert g1.tasks[0].executable.cmd == ['do', 'some']
assert isinstance(g1.tasks[1], Task)
assert g1.tasks[1].executable.cmd == ['do', 'some', 'more']
g2 = tasks['g2']
assert isinstance(g2, TaskGroup)
assert g2.name == 'g2'
assert g2.context == pathlib.Path('context')
assert g2.dependencies == ['g1']
assert g2.filename == 'file1.ceryle'
assert len(g2.tasks) == 1
assert isinstance(g2.tasks[0], Task)
assert g2.tasks[0].executable.cmd == ['do', 'awesome']
def test_parse_syntax_sugar():
raw_tasks = {
'g1': {
'tasks': [
Command('do some'),
Command('do some more'),
],
},
'g2': [
Command('do awesome'),
Command('do awesome more'),
],
}
tasks = dict([(g.name, g) for g in parse_tasks(raw_tasks, 'context', 'file1.ceryle')])
assert len(tasks) == 2
g1 = tasks['g1']
assert isinstance(g1, TaskGroup)
assert g1.name == 'g1'
assert g1.dependencies == []
assert g1.filename == 'file1.ceryle'
assert len(g1.tasks) == 2
assert isinstance(g1.tasks[0], Task)
assert g1.tasks[0].executable.cmd == ['do', 'some']
assert isinstance(g1.tasks[1], Task)
assert g1.tasks[1].executable.cmd == ['do', 'some', 'more']
g2 = tasks['g2']
assert isinstance(g2, TaskGroup)
assert g2.name == 'g2'
assert g2.dependencies == []
assert g2.filename == 'file1.ceryle'
assert len(g2.tasks) == 2
assert isinstance(g2.tasks[0], Task)
assert g2.tasks[0].executable.cmd == ['do', 'awesome']
assert isinstance(g2.tasks[1], Task)
assert g2.tasks[1].executable.cmd == ['do', 'awesome', 'more']
def test_parse_no_tasks():
raw_tasks = {
'g1': {
},
}
tasks = dict([(g.name, g) for g in parse_tasks(raw_tasks, 'context', 'file1.ceryle')])
assert len(tasks) == 1
g1 = tasks['g1']
assert isinstance(g1, TaskGroup)
assert g1.name == 'g1'
assert g1.dependencies == []
assert g1.filename == 'file1.ceryle'
assert len(g1.tasks) == 0
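# The three tests above repeat the same parse-and-index step; a sketch of
# a shared helper (not part of the original suite):
# def _parse(raw_tasks):
#     return {g.name: g for g in parse_tasks(raw_tasks, 'context', 'file1.ceryle')}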
| 26.371681
| 90
| 0.550671
| 362
| 2,980
| 4.486188
| 0.116022
| 0.077586
| 0.083744
| 0.098522
| 0.799877
| 0.705665
| 0.705665
| 0.705665
| 0.705665
| 0.652709
| 0
| 0.043296
| 0.279195
| 2,980
| 112
| 91
| 26.607143
| 0.712756
| 0
| 0
| 0.606742
| 0
| 0
| 0.113087
| 0
| 0
| 0
| 0
| 0
| 0.494382
| 1
| 0.033708
| false
| 0
| 0.033708
| 0
| 0.067416
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5368ff455d9fad548304189adebff730d8b0f478
| 15,404
|
py
|
Python
|
tests/test_pydv_images.py
|
fillmore1/PyDV
|
4258e00ae7261b779f0787278f01d007fc68f77e
|
[
"BSD-3-Clause"
] | 2
|
2019-04-04T02:32:04.000Z
|
2019-04-06T16:43:26.000Z
|
tests/test_pydv_images.py
|
fillmore1/PyDV
|
4258e00ae7261b779f0787278f01d007fc68f77e
|
[
"BSD-3-Clause"
] | 37
|
2019-04-03T23:25:09.000Z
|
2020-02-05T23:57:02.000Z
|
tests/test_pydv_images.py
|
fillmore1/PyDV
|
4258e00ae7261b779f0787278f01d007fc68f77e
|
[
"BSD-3-Clause"
] | 2
|
2019-04-25T15:56:31.000Z
|
2019-09-04T20:16:50.000Z
|
import os
import shutil
import subprocess
from matplotlib import image
from numpy import testing as np
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
PYDV_DIR = os.path.dirname(TEST_DIR)
BASELINE_DIR = os.path.join(TEST_DIR, 'baseline')
# ------------------------ #
# --- Prepare the data --- #
# ------------------------ #
# The output directory will store the generated images to compare against the baseline
output_dir = os.path.join(TEST_DIR, 'output')
if os.path.exists(output_dir):
shutil.rmtree(output_dir)
os.makedirs(output_dir)
# Generate a list of commands for PyDV to process. Between each command, we will
# place an "image" statement, which will cause PyDV to save the current state of
# the plot.
commands = [
f"""rd {os.path.join(TEST_DIR, "testData.txt")}
cur 1 2""",
"legend off",
"erase",
"""cur 1 2
L1 a b""",
"L2 a b 3.0 5.5",
"del c d",
"color a blue",
"color a red",
"add a b",
"annot FOO 3 7",
"convolve a b",
"""del d
copy a""",
"cos a",
"""del d
dashstyle b [2, 2, 4, 2]""",
"dataid off",
"""dataid on
delannot 1""",
"derivative a",
"""del d
dy b 2.5
dx b 3""",
"""dx b -3
divide c a""",
"""del d
divx c 2
divy c 2""",
"dom 0 10",
"dom de",
"exp a",
"log a",
"grid off",
"""grid on
integrate a""",
"""del d
linespoints a on
marker a . 20""",
"lnwidth b 10",
"""lnwidth b 3
makecurve (1 2 3) (5 2 3)""",
"""del d
mx c 2""",
"my a 3",
"recip a",
"scatter b on",
"""scatter b off
cos b""",
"acos b",
"cosh b",
"acosh b",
"sin c",
"asin c",
"sinh c",
"asinh c",
"sqr b",
"sqrt b",
"sqrx b",
"sqrtx b",
"tan a",
"atan a",
"tanh a",
"atanh a",
"a - b",
"""del d
b ** 2""",
"c / b",
"smooth d",
"""dy d -3
abs d""",
"""erase
legend on
gaussian 1 1 5""",
"exp A",
"log A",
"expx A",
"logx A",
"""exp A
sin A
log A"""
]
commands_file = os.path.join(output_dir, 'pydv_commands')
with open(commands_file, 'w') as fp:
for i, command in enumerate(commands):
image_file = os.path.join(output_dir, f"test_image_{i+1:02d}")
fp.write(command)
fp.write(f"\nimage {image_file} png\n\n")
fp.write("\nquit")
# Execute PyDv
exec_command = f"{os.path.join(PYDV_DIR, 'pydv', 'pdv')} -i {commands_file}"
process = subprocess.Popen(exec_command.split(), stdout=subprocess.PIPE)
output, error = process.communicate()
# ----------------- #
# --- Run tests --- #
# ----------------- #
# # Helper text to generate the below tests for pytest
# with open('delete_me.txt', 'w') as fp:
# for i in range(60):
# filename = f"test_image_{i+1:02d}.png"
# statement=f"""
# def test_image_{i+1:02d}():
# baseline = image.imread(os.path.join(BASELINE_DIR, '{filename}'))
# output = image.imread(os.path.join(output_dir, '{filename}'))
# np.assert_equal(baseline, output)
# """
# fp.write(statement)
# statement = ''
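# A sketch of the same 60 comparisons as a single parametrized test
# (assumes pytest, which the generated tests already rely on):
# import pytest
# @pytest.mark.parametrize("i", range(1, 61))
# def test_image(i):
#     filename = f"test_image_{i:02d}.png"
#     baseline = image.imread(os.path.join(BASELINE_DIR, filename))
#     output = image.imread(os.path.join(output_dir, filename))
#     np.assert_equal(baseline, output)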
def test_image_01():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_01.png'))
output = image.imread(os.path.join(output_dir, 'test_image_01.png'))
np.assert_equal(baseline, output)
def test_image_02():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_02.png'))
output = image.imread(os.path.join(output_dir, 'test_image_02.png'))
np.assert_equal(baseline, output)
def test_image_03():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_03.png'))
output = image.imread(os.path.join(output_dir, 'test_image_03.png'))
np.assert_equal(baseline, output)
def test_image_04():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_04.png'))
output = image.imread(os.path.join(output_dir, 'test_image_04.png'))
np.assert_equal(baseline, output)
def test_image_05():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_05.png'))
output = image.imread(os.path.join(output_dir, 'test_image_05.png'))
np.assert_equal(baseline, output)
def test_image_06():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_06.png'))
output = image.imread(os.path.join(output_dir, 'test_image_06.png'))
np.assert_equal(baseline, output)
def test_image_07():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_07.png'))
output = image.imread(os.path.join(output_dir, 'test_image_07.png'))
np.assert_equal(baseline, output)
def test_image_08():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_08.png'))
output = image.imread(os.path.join(output_dir, 'test_image_08.png'))
np.assert_equal(baseline, output)
def test_image_09():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_09.png'))
output = image.imread(os.path.join(output_dir, 'test_image_09.png'))
np.assert_equal(baseline, output)
def test_image_10():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_10.png'))
output = image.imread(os.path.join(output_dir, 'test_image_10.png'))
np.assert_equal(baseline, output)
def test_image_11():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_11.png'))
output = image.imread(os.path.join(output_dir, 'test_image_11.png'))
np.assert_equal(baseline, output)
def test_image_12():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_12.png'))
output = image.imread(os.path.join(output_dir, 'test_image_12.png'))
np.assert_equal(baseline, output)
def test_image_13():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_13.png'))
output = image.imread(os.path.join(output_dir, 'test_image_13.png'))
np.assert_equal(baseline, output)
def test_image_14():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_14.png'))
output = image.imread(os.path.join(output_dir, 'test_image_14.png'))
np.assert_equal(baseline, output)
def test_image_15():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_15.png'))
output = image.imread(os.path.join(output_dir, 'test_image_15.png'))
np.assert_equal(baseline, output)
def test_image_16():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_16.png'))
output = image.imread(os.path.join(output_dir, 'test_image_16.png'))
np.assert_equal(baseline, output)
def test_image_17():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_17.png'))
output = image.imread(os.path.join(output_dir, 'test_image_17.png'))
np.assert_equal(baseline, output)
def test_image_18():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_18.png'))
output = image.imread(os.path.join(output_dir, 'test_image_18.png'))
np.assert_equal(baseline, output)
def test_image_19():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_19.png'))
output = image.imread(os.path.join(output_dir, 'test_image_19.png'))
np.assert_equal(baseline, output)
def test_image_20():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_20.png'))
output = image.imread(os.path.join(output_dir, 'test_image_20.png'))
np.assert_equal(baseline, output)
def test_image_21():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_21.png'))
output = image.imread(os.path.join(output_dir, 'test_image_21.png'))
np.assert_equal(baseline, output)
def test_image_22():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_22.png'))
output = image.imread(os.path.join(output_dir, 'test_image_22.png'))
np.assert_equal(baseline, output)
def test_image_23():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_23.png'))
output = image.imread(os.path.join(output_dir, 'test_image_23.png'))
np.assert_equal(baseline, output)
def test_image_24():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_24.png'))
output = image.imread(os.path.join(output_dir, 'test_image_24.png'))
np.assert_equal(baseline, output)
def test_image_25():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_25.png'))
output = image.imread(os.path.join(output_dir, 'test_image_25.png'))
np.assert_equal(baseline, output)
def test_image_26():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_26.png'))
output = image.imread(os.path.join(output_dir, 'test_image_26.png'))
np.assert_equal(baseline, output)
def test_image_27():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_27.png'))
output = image.imread(os.path.join(output_dir, 'test_image_27.png'))
np.assert_equal(baseline, output)
def test_image_28():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_28.png'))
output = image.imread(os.path.join(output_dir, 'test_image_28.png'))
np.assert_equal(baseline, output)
def test_image_29():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_29.png'))
output = image.imread(os.path.join(output_dir, 'test_image_29.png'))
np.assert_equal(baseline, output)
def test_image_30():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_30.png'))
output = image.imread(os.path.join(output_dir, 'test_image_30.png'))
np.assert_equal(baseline, output)
def test_image_31():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_31.png'))
output = image.imread(os.path.join(output_dir, 'test_image_31.png'))
np.assert_equal(baseline, output)
def test_image_32():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_32.png'))
output = image.imread(os.path.join(output_dir, 'test_image_32.png'))
np.assert_equal(baseline, output)
def test_image_33():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_33.png'))
output = image.imread(os.path.join(output_dir, 'test_image_33.png'))
np.assert_equal(baseline, output)
def test_image_34():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_34.png'))
output = image.imread(os.path.join(output_dir, 'test_image_34.png'))
np.assert_equal(baseline, output)
def test_image_35():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_35.png'))
output = image.imread(os.path.join(output_dir, 'test_image_35.png'))
np.assert_equal(baseline, output)
def test_image_36():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_36.png'))
output = image.imread(os.path.join(output_dir, 'test_image_36.png'))
np.assert_equal(baseline, output)
def test_image_37():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_37.png'))
output = image.imread(os.path.join(output_dir, 'test_image_37.png'))
np.assert_equal(baseline, output)
def test_image_38():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_38.png'))
output = image.imread(os.path.join(output_dir, 'test_image_38.png'))
np.assert_equal(baseline, output)
def test_image_39():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_39.png'))
output = image.imread(os.path.join(output_dir, 'test_image_39.png'))
np.assert_equal(baseline, output)
def test_image_40():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_40.png'))
output = image.imread(os.path.join(output_dir, 'test_image_40.png'))
np.assert_equal(baseline, output)
def test_image_41():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_41.png'))
output = image.imread(os.path.join(output_dir, 'test_image_41.png'))
np.assert_equal(baseline, output)
def test_image_42():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_42.png'))
output = image.imread(os.path.join(output_dir, 'test_image_42.png'))
np.assert_equal(baseline, output)
def test_image_43():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_43.png'))
output = image.imread(os.path.join(output_dir, 'test_image_43.png'))
np.assert_equal(baseline, output)
def test_image_44():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_44.png'))
output = image.imread(os.path.join(output_dir, 'test_image_44.png'))
np.assert_equal(baseline, output)
def test_image_45():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_45.png'))
output = image.imread(os.path.join(output_dir, 'test_image_45.png'))
np.assert_equal(baseline, output)
def test_image_46():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_46.png'))
output = image.imread(os.path.join(output_dir, 'test_image_46.png'))
np.assert_equal(baseline, output)
def test_image_47():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_47.png'))
output = image.imread(os.path.join(output_dir, 'test_image_47.png'))
np.assert_equal(baseline, output)
def test_image_48():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_48.png'))
output = image.imread(os.path.join(output_dir, 'test_image_48.png'))
np.assert_equal(baseline, output)
def test_image_49():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_49.png'))
output = image.imread(os.path.join(output_dir, 'test_image_49.png'))
np.assert_equal(baseline, output)
def test_image_50():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_50.png'))
output = image.imread(os.path.join(output_dir, 'test_image_50.png'))
np.assert_equal(baseline, output)
def test_image_51():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_51.png'))
output = image.imread(os.path.join(output_dir, 'test_image_51.png'))
np.assert_equal(baseline, output)
def test_image_52():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_52.png'))
output = image.imread(os.path.join(output_dir, 'test_image_52.png'))
np.assert_equal(baseline, output)
def test_image_53():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_53.png'))
output = image.imread(os.path.join(output_dir, 'test_image_53.png'))
np.assert_equal(baseline, output)
def test_image_54():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_54.png'))
output = image.imread(os.path.join(output_dir, 'test_image_54.png'))
np.assert_equal(baseline, output)
def test_image_55():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_55.png'))
output = image.imread(os.path.join(output_dir, 'test_image_55.png'))
np.assert_equal(baseline, output)
def test_image_56():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_56.png'))
output = image.imread(os.path.join(output_dir, 'test_image_56.png'))
np.assert_equal(baseline, output)
def test_image_57():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_57.png'))
output = image.imread(os.path.join(output_dir, 'test_image_57.png'))
np.assert_equal(baseline, output)
def test_image_58():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_58.png'))
output = image.imread(os.path.join(output_dir, 'test_image_58.png'))
np.assert_equal(baseline, output)
def test_image_59():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_59.png'))
output = image.imread(os.path.join(output_dir, 'test_image_59.png'))
np.assert_equal(baseline, output)
def test_image_60():
baseline = image.imread(os.path.join(BASELINE_DIR, 'test_image_60.png'))
output = image.imread(os.path.join(output_dir, 'test_image_60.png'))
np.assert_equal(baseline, output)
| 34.929705
| 86
| 0.709945
| 2,446
| 15,404
| 4.235487
| 0.092396
| 0.158977
| 0.123552
| 0.200193
| 0.852896
| 0.822394
| 0.808301
| 0.808301
| 0.800965
| 0.561776
| 0
| 0.030975
| 0.130226
| 15,404
| 440
| 87
| 35.009091
| 0.742275
| 0.054661
| 0
| 0.185185
| 0
| 0
| 0.180838
| 0.003262
| 0
| 0
| 0
| 0
| 0.185185
| 1
| 0.185185
| false
| 0
| 0.015432
| 0
| 0.200617
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
7254c94deffa2e831b9f7802e28abe7ff393fbd2
| 27
|
py
|
Python
|
kpireport/tests/test_theme.py
|
diurnalist/kpireporter
|
b3ce9ca52567405557ea12f45c1a7fda076d746a
|
[
"BlueOak-1.0.0",
"Apache-2.0"
] | 9
|
2021-05-17T05:32:46.000Z
|
2022-03-16T22:49:26.000Z
|
kpireport/tests/test_theme.py
|
diurnalist/kpireporter
|
b3ce9ca52567405557ea12f45c1a7fda076d746a
|
[
"BlueOak-1.0.0",
"Apache-2.0"
] | 4
|
2020-10-10T23:38:20.000Z
|
2020-11-08T22:41:24.000Z
|
kpireport/tests/test_theme.py
|
diurnalist/kpireporter
|
b3ce9ca52567405557ea12f45c1a7fda076d746a
|
[
"BlueOak-1.0.0",
"Apache-2.0"
] | 1
|
2021-01-12T02:49:04.000Z
|
2021-01-12T02:49:04.000Z
|
def test_theme():
pass
| 9
| 17
| 0.62963
| 4
| 27
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.259259
| 27
| 2
| 18
| 13.5
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
72d240f7008c24054fe09fe5e48cdb067b7af11c
| 48
|
py
|
Python
|
em2/auth/__init__.py
|
samuelcolvin/em2
|
a587eaa80c09a2b44d9c221d09a563aad5b05d78
|
[
"MIT"
] | 5
|
2019-03-20T19:07:45.000Z
|
2020-10-03T01:16:05.000Z
|
em2/auth/__init__.py
|
samuelcolvin/em2
|
a587eaa80c09a2b44d9c221d09a563aad5b05d78
|
[
"MIT"
] | 51
|
2019-03-12T16:19:46.000Z
|
2021-03-09T00:52:24.000Z
|
em2/auth/__init__.py
|
samuelcolvin/em2
|
a587eaa80c09a2b44d9c221d09a563aad5b05d78
|
[
"MIT"
] | 1
|
2019-05-31T14:41:18.000Z
|
2019-05-31T14:41:18.000Z
|
from .main import create_app_auth # noqa: F401
| 24
| 47
| 0.770833
| 8
| 48
| 4.375
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 0.166667
| 48
| 1
| 48
| 48
| 0.8
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
72e5fbfbc76386fcacccff2adaf9beb8bec3a5a0
| 242
|
py
|
Python
|
src/simmate/calculators/vasp/workflows/energy/__init__.py
|
laurenmm/simmate-1
|
c06b94c46919b01cda50f78221ad14f75c100a14
|
[
"BSD-3-Clause"
] | 9
|
2021-12-21T02:58:21.000Z
|
2022-01-25T14:00:06.000Z
|
src/simmate/calculators/vasp/workflows/energy/__init__.py
|
laurenmm/simmate-1
|
c06b94c46919b01cda50f78221ad14f75c100a14
|
[
"BSD-3-Clause"
] | 51
|
2022-01-01T15:59:58.000Z
|
2022-03-26T21:25:42.000Z
|
src/simmate/calculators/vasp/workflows/energy/__init__.py
|
laurenmm/simmate-1
|
c06b94c46919b01cda50f78221ad14f75c100a14
|
[
"BSD-3-Clause"
] | 7
|
2022-01-01T03:44:32.000Z
|
2022-03-29T19:59:27.000Z
|
# -*- coding: utf-8 -*-
from .materials_project import workflow as matproj_workflow
from .mit import workflow as mit_workflow
from .quality_04 import workflow as quality04_workflow
from .neb_endpoint import workflow as neb_endpoint_workflow
| 34.571429
| 59
| 0.822314
| 35
| 242
| 5.457143
| 0.457143
| 0.293194
| 0.335079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023585
| 0.123967
| 242
| 6
| 60
| 40.333333
| 0.877358
| 0.086777
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f443b011be36ac96d6d1e31d3bece9a92fa8a7b1
| 260
|
py
|
Python
|
coremltools/converters/nnssa/commons/serialization/__init__.py
|
Gerzer/coremltools
|
47e2010a68668bd1960dca040f5f87c0e66a0cbd
|
[
"BSD-3-Clause"
] | 65
|
2019-10-02T09:56:22.000Z
|
2022-03-16T22:41:14.000Z
|
coremltools/converters/nnssa/commons/serialization/__init__.py
|
velociraptor111/coremltools
|
655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492
|
[
"BSD-3-Clause"
] | 51
|
2020-01-13T07:54:13.000Z
|
2022-03-17T09:11:56.000Z
|
coremltools/converters/nnssa/commons/serialization/__init__.py
|
velociraptor111/coremltools
|
655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492
|
[
"BSD-3-Clause"
] | 16
|
2020-03-06T09:26:03.000Z
|
2022-02-05T05:35:05.000Z
|
# -*- coding: utf-8 -*-
from __future__ import print_function as _
from __future__ import division as _
from __future__ import absolute_import as _
from .dump_impl import dump, dump_obj
from .file_writer import file_writer
from .file_reader import file_reader
| 32.5
| 43
| 0.811538
| 39
| 260
| 4.820513
| 0.435897
| 0.159574
| 0.255319
| 0.191489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004464
| 0.138462
| 260
| 7
| 44
| 37.142857
| 0.834821
| 0.080769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.166667
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f450a8510479390f8de6a93b6c3642bf3e749748
| 68
|
py
|
Python
|
dpipe/torch/__init__.py
|
samokhinv/deep_pipe
|
9461b02f5f32c3e9f24490619ebccf417979cffc
|
[
"MIT"
] | 38
|
2017-09-08T04:51:17.000Z
|
2022-03-29T17:34:22.000Z
|
dpipe/torch/__init__.py
|
samokhinv/deep_pipe
|
9461b02f5f32c3e9f24490619ebccf417979cffc
|
[
"MIT"
] | 41
|
2017-09-29T22:06:21.000Z
|
2021-12-03T09:31:57.000Z
|
dpipe/torch/__init__.py
|
samokhinv/deep_pipe
|
9461b02f5f32c3e9f24490619ebccf417979cffc
|
[
"MIT"
] | 12
|
2017-09-08T04:40:39.000Z
|
2021-01-19T19:19:37.000Z
|
from .model import *
from .utils import *
from .functional import *
| 17
| 25
| 0.735294
| 9
| 68
| 5.555556
| 0.555556
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 68
| 3
| 26
| 22.666667
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f452e015f08c0d28367d5893ffe0680dba67228d
| 34
|
py
|
Python
|
01/mymodule.py
|
mariusgruenewald/lectures-2019
|
36812db370dfe7229be2df88b5020940394e54c0
|
[
"MIT"
] | 14
|
2019-01-11T09:47:18.000Z
|
2019-08-25T05:45:18.000Z
|
01/mymodule.py
|
mariusgruenewald/lectures-2019
|
36812db370dfe7229be2df88b5020940394e54c0
|
[
"MIT"
] | 19
|
2020-01-06T14:43:17.000Z
|
2020-05-17T14:49:12.000Z
|
01/mymodule.py
|
mariusgruenewald/lectures-2019
|
36812db370dfe7229be2df88b5020940394e54c0
|
[
"MIT"
] | 31
|
2019-02-11T09:23:44.000Z
|
2020-01-13T10:54:42.000Z
|
def myfunction(x):
return x**2
| 17
| 18
| 0.647059
| 6
| 34
| 3.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 0.205882
| 34
| 2
| 19
| 17
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
f45e4be000a17348664486f627f74934c81add1a
| 6,314
|
py
|
Python
|
examples/erniesage/models/message_passing.py
|
Nancy823/PGL
|
8be7e76a7d0f3ca28c3e69b505947ea5b68af7f3
|
[
"Apache-2.0"
] | 1
|
2021-04-22T17:30:12.000Z
|
2021-04-22T17:30:12.000Z
|
examples/erniesage/models/message_passing.py
|
cheeryoung79/PGL
|
fc517bbb87c570d0b854507769078c479d613914
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-04-29T13:38:01.000Z
|
2020-04-29T13:38:01.000Z
|
examples/erniesage/models/message_passing.py
|
cheeryoung79/PGL
|
fc517bbb87c570d0b854507769078c479d613914
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-01T12:00:31.000Z
|
2021-09-01T12:00:31.000Z
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import paddle
import paddle.fluid as fluid
import paddle.fluid.layers as L
def copy_send(src_feat, dst_feat, edge_feat):
"""doc"""
return src_feat["h"]
def weighted_copy_send(src_feat, dst_feat, edge_feat):
"""doc"""
return src_feat["h"] * edge_feat["weight"]
def mean_recv(feat):
"""doc"""
return fluid.layers.sequence_pool(feat, pool_type="average")
def sum_recv(feat):
"""doc"""
return fluid.layers.sequence_pool(feat, pool_type="sum")
def max_recv(feat):
"""doc"""
return fluid.layers.sequence_pool(feat, pool_type="max")
def lstm_recv(feat):
"""doc"""
hidden_dim = 128
forward, _ = fluid.layers.dynamic_lstm(
input=feat, size=hidden_dim * 4, use_peepholes=False)
output = fluid.layers.sequence_last_step(forward)
return output
def graphsage_sum(gw, feature, hidden_size, act, initializer, learning_rate, name):
"""doc"""
msg = gw.send(copy_send, nfeat_list=[("h", feature)])
neigh_feature = gw.recv(msg, sum_recv)
self_feature = feature
self_feature = fluid.layers.fc(self_feature,
hidden_size,
act=act,
param_attr=fluid.ParamAttr(name=name + "_l.w_0", initializer=initializer,
learning_rate=learning_rate),
bias_attr=name+"_l.b_0"
)
neigh_feature = fluid.layers.fc(neigh_feature,
hidden_size,
act=act,
param_attr=fluid.ParamAttr(name=name + "_r.w_0", initializer=initializer,
learning_rate=learning_rate),
bias_attr=name+"_r.b_0"
)
output = fluid.layers.concat([self_feature, neigh_feature], axis=1)
output = fluid.layers.l2_normalize(output, axis=1)
return output
def graphsage_mean(gw, feature, hidden_size, act, initializer, learning_rate, name):
"""doc"""
msg = gw.send(copy_send, nfeat_list=[("h", feature)])
neigh_feature = gw.recv(msg, mean_recv)
self_feature = feature
self_feature = fluid.layers.fc(self_feature,
hidden_size,
act=act,
param_attr=fluid.ParamAttr(name=name + "_l.w_0", initializer=initializer,
learning_rate=learning_rate),
bias_attr=name+"_l.b_0"
)
neigh_feature = fluid.layers.fc(neigh_feature,
hidden_size,
act=act,
param_attr=fluid.ParamAttr(name=name + "_r.w_0", initializer=initializer,
learning_rate=learning_rate),
bias_attr=name+"_r.b_0"
)
output = fluid.layers.concat([self_feature, neigh_feature], axis=1)
output = fluid.layers.l2_normalize(output, axis=1)
return output
def pinsage_mean(gw, feature, hidden_size, act, initializer, learning_rate, name):
"""doc"""
msg = gw.send(weighted_copy_send, nfeat_list=[("h", feature)], efeat_list=["weight"])
neigh_feature = gw.recv(msg, mean_recv)
self_feature = feature
self_feature = fluid.layers.fc(self_feature,
hidden_size,
act=act,
param_attr=fluid.ParamAttr(name=name + "_l.w_0", initializer=initializer,
learning_rate=learning_rate),
bias_attr=name+"_l.b_0"
)
neigh_feature = fluid.layers.fc(neigh_feature,
hidden_size,
act=act,
param_attr=fluid.ParamAttr(name=name + "_r.w_0", initializer=initializer,
learning_rate=learning_rate),
bias_attr=name+"_r.b_0"
)
output = fluid.layers.concat([self_feature, neigh_feature], axis=1)
output = fluid.layers.l2_normalize(output, axis=1)
return output
def pinsage_sum(gw, feature, hidden_size, act, initializer, learning_rate, name):
"""doc"""
msg = gw.send(weighted_copy_send, nfeat_list=[("h", feature)], efeat_list=["weight"])
neigh_feature = gw.recv(msg, sum_recv)
self_feature = feature
self_feature = fluid.layers.fc(self_feature,
hidden_size,
act=act,
param_attr=fluid.ParamAttr(name=name + "_l.w_0", initializer=initializer,
learning_rate=learning_rate),
bias_attr=name+"_l.b_0"
)
neigh_feature = fluid.layers.fc(neigh_feature,
hidden_size,
act=act,
param_attr=fluid.ParamAttr(name=name + "_r.w_0", initializer=initializer,
learning_rate=learning_rate),
bias_attr=name+"_r.b_0"
)
output = fluid.layers.concat([self_feature, neigh_feature], axis=1)
output = fluid.layers.l2_normalize(output, axis=1)
return output
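# A minimal usage sketch, assuming a PGL graph wrapper `gw` and a node
# feature tensor `feature` (all four aggregators share this signature):
# out = graphsage_mean(gw, feature, hidden_size=64, act='relu',
#                      initializer=None, learning_rate=1.0, name='layer0')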
| 43.246575
| 109
| 0.536427
| 688
| 6,314
| 4.68314
| 0.196221
| 0.075109
| 0.063315
| 0.074488
| 0.759466
| 0.759466
| 0.759466
| 0.759466
| 0.759466
| 0.759466
| 0
| 0.010043
| 0.36918
| 6,314
| 145
| 110
| 43.544828
| 0.798895
| 0.09867
| 0
| 0.701923
| 0
| 0
| 0.023632
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096154
| false
| 0
| 0.038462
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f4739d56b417ab0c67435100679555bf659a2dd6
| 2,020
|
py
|
Python
|
code/loader.py
|
SeanLee97/duReader_pytorch
|
1d55022ed0a87054f9a0d6e012a75a6380984264
|
[
"MIT"
] | 14
|
2018-02-22T04:29:21.000Z
|
2020-02-04T07:00:54.000Z
|
code/loader.py
|
SeanLee97/duReader_pytorch
|
1d55022ed0a87054f9a0d6e012a75a6380984264
|
[
"MIT"
] | null | null | null |
code/loader.py
|
SeanLee97/duReader_pytorch
|
1d55022ed0a87054f9a0d6e012a75a6380984264
|
[
"MIT"
] | 3
|
2018-01-13T16:31:04.000Z
|
2018-08-01T03:40:10.000Z
|
import h5py
import math
import torch
import torch.utils.data as data
class loadTrainDataset(data.Dataset):
def __init__(self, path):
self.file = h5py.File(path)
self.nb_samples = len(self.file['question'][:])
print('Dataset: ', self.nb_samples)
def __getitem__(self, index):
question = self.file['question'][index]
paragraph = self.file['paragraph'][index]
answer = self.file['answer'][index]
question_length = self.file['question_length'][index]
paragraph_length = self.file['paragraph_length'][index]
return question, paragraph, answer, question_length, paragraph_length
def __len__(self):
return self.nb_samples
class loadValDataset(data.Dataset):
def __init__(self, path):
self.file = h5py.File(path)
self.nb_samples = len(self.file['question'][:])
print('Dataset: ', self.nb_samples)
def __getitem__(self, index):
question_id = self.file['question_id'][index]
question = self.file['question'][index]
paragraphs = self.file['paragraphs'][index]
question_length = self.file['question_length'][index]
paragraph_lengths = self.file['paragraph_lengths'][index]
return question, paragraphs, question_length, paragraph_lengths
def __len__(self):
return self.nb_samples
class loadTestDataset(data.Dataset):
def __init__(self, path):
self.file = h5py.File(path)
self.nb_samples = len(self.file['question'][:])
print('Dataset: ', self.nb_samples)
def __getitem__(self, index):
question_id = self.file['question_id'][index]
question = self.file['question'][index]
paragraph = self.file['paragraph'][index]
question_length = self.file['question_length'][index]
paragraph_length = self.file['paragraph_length'][index]
return question_id, question, paragraph, question_length, paragraph_length
def __len__(self):
return self.nb_samples
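# A minimal usage sketch with a hypothetical HDF5 path, wrapping a dataset
# in a standard DataLoader:
# train_set = loadTrainDataset('data/train.h5')
# train_loader = data.DataLoader(train_set, batch_size=32, shuffle=True)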
| 36.071429
| 82
| 0.662871
| 235
| 2,020
| 5.408511
| 0.131915
| 0.132179
| 0.138474
| 0.042486
| 0.780488
| 0.780488
| 0.780488
| 0.780488
| 0.749803
| 0.70653
| 0
| 0.002513
| 0.211881
| 2,020
| 56
| 83
| 36.071429
| 0.795854
| 0
| 0
| 0.717391
| 0
| 0
| 0.111331
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.195652
| false
| 0
| 0.086957
| 0.065217
| 0.478261
| 0.065217
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
be56b5d523bea502c9a48fe042037921c7fa05ae
| 29,676
|
py
|
Python
|
qulab/tools/resonator_tools/circuit.py
|
liuqichun3809/quantum-lab
|
05bea707b314ea1687866f56ee439079336cfbbc
|
[
"MIT"
] | 3
|
2020-08-30T16:11:49.000Z
|
2021-03-05T12:09:30.000Z
|
qulab/tools/resonator_tools/circuit.py
|
liuqichun3809/quantum-lab
|
05bea707b314ea1687866f56ee439079336cfbbc
|
[
"MIT"
] | null | null | null |
qulab/tools/resonator_tools/circuit.py
|
liuqichun3809/quantum-lab
|
05bea707b314ea1687866f56ee439079336cfbbc
|
[
"MIT"
] | 2
|
2019-07-24T15:12:31.000Z
|
2019-09-20T02:17:28.000Z
|
import warnings
import numpy as np
import scipy.optimize as spopt
from scipy.constants import hbar
from scipy.interpolate import splrep, splev
from .utilities import plotting, save_load, Watt2dBm, dBm2Watt
from .circlefit import circlefit
from .calibration import calibration
##
## z_data_raw denotes the raw data
## z_data denotes the normalized data
##
class reflection_port(circlefit, save_load, plotting, calibration):
'''
normal direct port probed in reflection
'''
def __init__(self, f_data=None, z_data_raw=None):
self.porttype = 'direct'
self.fitresults = {}
self.z_data = None
if f_data is not None:
self.f_data = np.array(f_data)
else:
self.f_data=None
if z_data_raw is not None:
self.z_data_raw = np.array(z_data_raw)
else:
self.z_data=None
self.phasefitsmooth = 3
def _S11(self,f,fr,k_c,k_i):
'''
use either frequency or angular frequency units
for all quantities
k_l=k_c+k_i: total (loaded) coupling rate
k_c: coupling rate
k_i: internal loss rate
'''
return ((k_c-k_i)+2j*(f-fr))/((k_c+k_i)-2j*(f-fr))
def get_delay(self,f_data,z_data,delay=None,ignoreslope=True,guess=True):
'''
ignoreslope option not used here
retrieves the cable delay assuming the ideal resonance has a circular shape
modifies the cable delay until the shape Im(S21) vs Re(S21) is circular
see "do_calibration"
'''
maxval = np.max(np.absolute(z_data))
z_data = z_data/maxval
A1, A2, A3, A4, fr, Ql = self._fit_skewed_lorentzian(f_data,z_data)
if self.df_error/fr > 0.0001 or self.dQl_error/Ql>0.1:
#print("WARNING: Calibration using Lorentz fit failed, trying phase fit...")
A1 = np.mean(np.absolute(z_data))
A2 = 0.
A3 = 0.
A4 = 0.
#fr = np.mean(f_data)
f = splrep(f_data,np.unwrap(np.angle(z_data)),k=5,s=self.phasefitsmooth)
fr = f_data[np.argmax(np.absolute(splev(f_data,f,der=1)))]
Ql = 1e4
if ignoreslope==True:
A2 = 0.
else:
A2 = 0.
print("WARNING: The ignoreslope option is ignored! Corrections to the baseline should be done manually prior to fitting.")
print("see also: resonator_tools.calibration.fit_baseline_amp() etc. for help on fitting the baseline.")
print("There is also an example ipython notebook for using this function.")
print("However, make sure to understand the impact of the baseline (parasitic coupled resonances etc.) on your system.")
#z_data = (np.absolute(z_data)-A2*(f_data-fr)) * np.exp(np.angle(z_data)*1j) #usually not necessary
if delay is None:
if guess==True:
delay = self._guess_delay(f_data,z_data)
else:
delay=0.
delay = self._fit_delay(f_data,z_data,delay,maxiter=200)
params = [A1, A2, A3, A4, fr, Ql]
return delay, params
def do_calibration(self,f_data,z_data,ignoreslope=True,guessdelay=True,fixed_delay=None):
'''
calculating parameters for normalization
'''
delay, params = self.get_delay(f_data,z_data,ignoreslope=ignoreslope,guess=guessdelay,delay=fixed_delay)
z_data = (z_data-params[1]*(f_data-params[4]))*np.exp(2.*1j*np.pi*delay*f_data)
xc, yc, r0 = self._fit_circle(z_data)
zc = np.complex(xc,yc)
fitparams = self._phase_fit(f_data,self._center(z_data,zc),0.,np.absolute(params[5]),params[4])
theta, Ql, fr = fitparams
beta = self._periodic_boundary(theta+np.pi,np.pi) ###
offrespoint = np.complex((xc+r0*np.cos(beta)),(yc+r0*np.sin(beta)))
alpha = self._periodic_boundary(np.angle(offrespoint)+np.pi,np.pi)
#a = np.absolute(offrespoint)
#alpha = np.angle(zc)
a = r0 + np.absolute(zc)
return delay, a, alpha, fr, Ql, params[1], params[4]
def do_normalization(self,f_data,z_data,delay,amp_norm,alpha,A2,frcal):
'''
transforming resonator into canonical position
'''
return (z_data-A2*(f_data-frcal))/amp_norm*np.exp(1j*(-alpha+2.*np.pi*delay*f_data))
def circlefit(self,f_data,z_data,fr=None,Ql=None,refine_results=False,calc_errors=True):
'''
S11 version of the circlefit
'''
if fr is None: fr=f_data[np.argmin(np.absolute(z_data))]
if Ql is None: Ql=1e6
xc, yc, r0 = self._fit_circle(z_data,refine_results=refine_results)
phi0 = -np.arcsin(yc/r0)
theta0 = self._periodic_boundary(phi0+np.pi,np.pi)
z_data_corr = self._center(z_data,np.complex(xc,yc))
theta0, Ql, fr = self._phase_fit(f_data,z_data_corr,theta0,Ql,fr)
#print("Ql from phasefit is: " + str(Ql))
Qi = Ql/(1.-r0)
Qc = 1./(1./Ql-1./Qi)
results = {"Qi":Qi,"Qc":Qc,"Ql":Ql,"fr":fr,"theta0":theta0}
#calculation of the error
p = [fr,Qc,Ql]
#chi_square, errors = rt.get_errors(rt.residuals_notch_ideal,f_data,z_data,p)
if calc_errors==True:
chi_square, cov = self._get_cov_fast_directrefl(f_data,z_data,p)
#chi_square, cov = rt.get_cov(rt.residuals_notch_ideal,f_data,z_data,p)
if cov is not None:
errors = np.sqrt(np.diagonal(cov))
fr_err,Qc_err,Ql_err = errors
#calc Qi with error prop (sum the squares of the variances and covariances)
dQl = 1./((1./Ql-1./Qc)**2*Ql**2)
dQc = - 1./((1./Ql-1./Qc)**2*Qc**2)
Qi_err = np.sqrt((dQl**2*cov[2][2]) + (dQc**2*cov[1][1])+(2*dQl*dQc*cov[2][1])) #with correlations
errors = {"Ql_err":Ql_err, "Qc_err":Qc_err, "fr_err":fr_err,"chi_square":chi_square,"Qi_err":Qi_err}
results.update( errors )
else:
print("WARNING: Error calculation failed!")
else:
#just calc chisquared:
fun2 = lambda x: self._residuals_notch_ideal(x,f_data,z_data)**2
chi_square = 1./float(len(f_data)-len(p)) * (fun2(p)).sum()
errors = {"chi_square":chi_square}
results.update(errors)
return results
def autofit(self,electric_delay=None,fcrop=None):
'''
automatic calibration and fitting
electric_delay: set the electric delay manually
fcrop = (f1,f2) : crop the frequency range used for fitting
'''
if fcrop is None:
self._fid = np.ones(self.f_data.size,dtype=bool)
else:
f1, f2 = fcrop
self._fid = np.logical_and(self.f_data>=f1,self.f_data<=f2)
delay, amp_norm, alpha, fr, Ql, A2, frcal =\
self.do_calibration(self.f_data[self._fid],self.z_data_raw[self._fid],ignoreslope=True,guessdelay=False,fixed_delay=electric_delay)
self.z_data = self.do_normalization(self.f_data,self.z_data_raw,delay,amp_norm,alpha,A2,frcal)
self.fitresults = self.circlefit(self.f_data[self._fid],self.z_data[self._fid],fr,Ql,refine_results=False,calc_errors=True)
self.z_data_sim = A2*(self.f_data-frcal)+self._S11_directrefl(self.f_data,fr=self.fitresults["fr"],Ql=self.fitresults["Ql"],Qc=self.fitresults["Qc"],a=amp_norm,alpha=alpha,delay=delay)
self.z_data_sim_norm = self._S11_directrefl(self.f_data,fr=self.fitresults["fr"],Ql=self.fitresults["Ql"],Qc=self.fitresults["Qc"],a=1.,alpha=0.,delay=0.)
self._delay = delay
def GUIfit(self):
'''
automatic fit with possible user interaction to crop the data and modify the electric delay
f1,f2,delay are determined in the GUI. Then, data is cropped and autofit with delay is performed
'''
#copy data
fmin, fmax = self.f_data.min(), self.f_data.max()
self.autofit()
self.__delay = self._delay
#prepare plot and slider
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, Button
fig, ((ax2,ax0),(ax1,ax3)) = plt.subplots(nrows=2,ncols=2)
plt.suptitle('Normalized data. Use the sliders to improve the fitting if necessary.')
plt.subplots_adjust(left=0.25, bottom=0.25)
l0, = ax0.plot(self.f_data*1e-9,np.absolute(self.z_data))
l1, = ax1.plot(self.f_data*1e-9,np.angle(self.z_data))
l2, = ax2.plot(np.real(self.z_data),np.imag(self.z_data))
l0s, = ax0.plot(self.f_data*1e-9,np.absolute(self.z_data_sim_norm))
l1s, = ax1.plot(self.f_data*1e-9,np.angle(self.z_data_sim_norm))
l2s, = ax2.plot(np.real(self.z_data_sim_norm),np.imag(self.z_data_sim_norm))
ax0.set_xlabel('f (GHz)')
ax1.set_xlabel('f (GHz)')
ax2.set_xlabel('real')
ax0.set_ylabel('amp')
ax1.set_ylabel('phase (rad)')
ax2.set_ylabel('imag')
fr_ann = ax3.annotate('fr = %e Hz +- %e Hz' % (self.fitresults['fr'],self.fitresults['fr_err']),xy=(0.1, 0.8), xycoords='axes fraction')
Ql_ann = ax3.annotate('Ql = %e +- %e' % (self.fitresults['Ql'],self.fitresults['Ql_err']),xy=(0.1, 0.6), xycoords='axes fraction')
Qc_ann = ax3.annotate('Qc = %e +- %e' % (self.fitresults['Qc'],self.fitresults['Qc_err']),xy=(0.1, 0.4), xycoords='axes fraction')
Qi_ann = ax3.annotate('Qi = %e +- %e' % (self.fitresults['Qi'],self.fitresults['Qi_err']),xy=(0.1, 0.2), xycoords='axes fraction')
axcolor = 'lightgoldenrodyellow'
axdelay = plt.axes([0.25, 0.05, 0.65, 0.03], axisbg=axcolor)
axf2 = plt.axes([0.25, 0.1, 0.65, 0.03], axisbg=axcolor)
axf1 = plt.axes([0.25, 0.15, 0.65, 0.03], axisbg=axcolor)
sscale = 10.
sdelay = Slider(axdelay, 'delay', -1., 1., valinit=self.__delay/(sscale*self.__delay),valfmt='%f')
df = (fmax-fmin)*0.05
sf2 = Slider(axf2, 'f2', (fmin-df)*1e-9, (fmax+df)*1e-9, valinit=fmax*1e-9,valfmt='%.10f GHz')
sf1 = Slider(axf1, 'f1', (fmin-df)*1e-9, (fmax+df)*1e-9, valinit=fmin*1e-9,valfmt='%.10f GHz')
def update(val):
self.autofit(electric_delay=sdelay.val*sscale*self.__delay,fcrop=(sf1.val*1e9,sf2.val*1e9))
l0.set_data(self.f_data*1e-9,np.absolute(self.z_data))
l1.set_data(self.f_data*1e-9,np.angle(self.z_data))
l2.set_data(np.real(self.z_data),np.imag(self.z_data))
l0s.set_data(self.f_data[self._fid]*1e-9,np.absolute(self.z_data_sim_norm[self._fid]))
l1s.set_data(self.f_data[self._fid]*1e-9,np.angle(self.z_data_sim_norm[self._fid]))
l2s.set_data(np.real(self.z_data_sim_norm[self._fid]),np.imag(self.z_data_sim_norm[self._fid]))
fr_ann.set_text('fr = %e Hz +- %e Hz' % (self.fitresults['fr'],self.fitresults['fr_err']))
Ql_ann.set_text('Ql = %e +- %e' % (self.fitresults['Ql'],self.fitresults['Ql_err']))
Qc_ann.set_text('Qc = %e +- %e' % (self.fitresults['Qc'],self.fitresults['Qc_err']))
Qi_ann.set_text('Qi = %e +- %e' % (self.fitresults['Qi'],self.fitresults['Qi_err']))
fig.canvas.draw_idle()
def btnclicked(event):
self.autofit(electric_delay=None,fcrop=(sf1.val*1e9,sf2.val*1e9))
self.__delay = self._delay
sdelay.reset()
update(event)
sf1.on_changed(update)
sf2.on_changed(update)
sdelay.on_changed(update)
btnax = plt.axes([0.05, 0.1, 0.1, 0.04])
button = Button(btnax, 'auto-delay', color=axcolor, hovercolor='0.975')
button.on_clicked(btnclicked)
plt.show()
plt.close()
def _S11_directrefl(self,f,fr=10e9,Ql=900,Qc=1000.,a=1.,alpha=0.,delay=.0):
'''
full model for direct reflection (S11) resonances
'''
return a*np.exp(np.complex(0,alpha))*np.exp(-2j*np.pi*f*delay) * ( 2.*Ql/Qc - 1. + 2j*Ql*(fr-f)/fr ) / ( 1. - 2j*Ql*(fr-f)/fr )
def get_single_photon_limit(self,unit='dBm'):
'''
returns the amount of power in units of W necessary
to maintain one photon on average in the cavity
unit can be 'dBm' or 'watt'
'''
if self.fitresults!={}:
fr = self.fitresults['fr']
k_c = 2*np.pi*fr/self.fitresults['Qc']
k_i = 2*np.pi*fr/self.fitresults['Qi']
if unit=='dBm':
return Watt2dBm(1./(4.*k_c/(2.*np.pi*hbar*fr*(k_c+k_i)**2)))
elif unit=='watt':
return 1./(4.*k_c/(2.*np.pi*hbar*fr*(k_c+k_i)**2))
else:
warnings.warn('Please perform the fit first',UserWarning)
return None
def get_photons_in_resonator(self,power,unit='dBm'):
'''
returns the average number of photons
for a given power (default unit is 'dBm')
unit can be 'dBm' or 'watt'
'''
if self.fitresults!={}:
if unit=='dBm':
power = dBm2Watt(power)
fr = self.fitresults['fr']
k_c = 2*np.pi*fr/self.fitresults['Qc']
k_i = 2*np.pi*fr/self.fitresults['Qi']
return 4.*k_c/(2.*np.pi*hbar*fr*(k_c+k_i)**2) * power
else:
warnings.warn('Please perform the fit first',UserWarning)
return None
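# A minimal usage sketch, assuming measured frequency and S11 arrays `f`, `z`:
# port = reflection_port(f_data=f, z_data_raw=z)
# port.autofit()
# print(port.fitresults['fr'], port.fitresults['Ql'], port.fitresults['Qi'])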
class notch_port(circlefit, save_load, plotting, calibration):
'''
notch type port probed in transmission
'''
def __init__(self, f_data=None, z_data_raw=None):
self.porttype = 'notch'
self.fitresults = {}
self.z_data = None
if f_data is not None:
self.f_data = np.array(f_data)
else:
self.f_data=None
if z_data_raw is not None:
self.z_data_raw = np.array(z_data_raw)
else:
self.z_data_raw=None
def get_delay(self,f_data,z_data,delay=None,ignoreslope=True,guess=True):
'''
retrieves the cable delay assuming the ideal resonance has a circular shape
modifies the cable delay until the shape Im(S21) vs Re(S21) is circular
see "do_calibration"
'''
maxval = np.max(np.absolute(z_data))
z_data = z_data/maxval
A1, A2, A3, A4, fr, Ql = self._fit_skewed_lorentzian(f_data,z_data)
if ignoreslope==True:
A2 = 0.
else:
A2 = 0.
print("WARNING: The ignoreslope option is ignored! Corrections to the baseline should be done manually prior to fitting.")
print("see also: resonator_tools.calibration.fit_baseline_amp() etc. for help on fitting the baseline.")
print("There is also an example ipython notebook for using this function.")
print("However, make sure to understand the impact of the baseline (parasitic coupled resonances etc.) on your system.")
#z_data = (np.absolute(z_data)-A2*(f_data-fr)) * np.exp(np.angle(z_data)*1j) #usually not necessary
if delay is None:
if guess==True:
delay = self._guess_delay(f_data,z_data)
else:
delay=0.
delay = self._fit_delay(f_data,z_data,delay,maxiter=200)
params = [A1, A2, A3, A4, fr, Ql]
return delay, params
def do_calibration(self,f_data,z_data,ignoreslope=True,guessdelay=True,fixed_delay=None):
'''
performs an automated calibration and tries to determine the prefactors a, alpha, delay
fr, Ql, and a possible slope are extra information, which can be used as start parameters for subsequent fits
see also "do_normalization"
the calibration procedure works for transmission line resonators as well
'''
delay, params = self.get_delay(f_data,z_data,ignoreslope=ignoreslope,guess=guessdelay,delay=fixed_delay)
z_data = (z_data-params[1]*(f_data-params[4]))*np.exp(2.*1j*np.pi*delay*f_data)
xc, yc, r0 = self._fit_circle(z_data)
zc = np.complex(xc,yc)
fitparams = self._phase_fit(f_data,self._center(z_data,zc),0.,np.absolute(params[5]),params[4])
theta, Ql, fr = fitparams
beta = self._periodic_boundary(theta+np.pi,np.pi)
offrespoint = np.complex((xc+r0*np.cos(beta)),(yc+r0*np.sin(beta)))
alpha = np.angle(offrespoint)
a = np.absolute(offrespoint)
return delay, a, alpha, fr, Ql, params[1], params[4]
def do_normalization(self,f_data,z_data,delay,amp_norm,alpha,A2,frcal):
'''
removes the prefactors a, alpha, delay and returns the calibrated data, see also "do_calibration"
works also for transmission line resonators
'''
return (z_data-A2*(f_data-frcal))/amp_norm*np.exp(1j*(-alpha+2.*np.pi*delay*f_data))
def circlefit(self,f_data,z_data,fr=None,Ql=None,refine_results=False,calc_errors=True):
'''
performs a circle fit on a frequency vs. complex resonator scattering data set
Data has to be normalized!!
INPUT:
f_data,z_data: input data (frequency, complex S21 data)
OUTPUT:
outputs a dictionary {key:value} consisting of the fit values, errors and status information about the fit
values: {"phi0":phi0, "Ql":Ql, "absolute(Qc)":absQc, "Qi": Qi, "electronic_delay":delay, "complexQc":complQc, "resonance_freq":fr, "prefactor_a":a, "prefactor_alpha":alpha}
errors: {"phi0_err":phi0_err, "Ql_err":Ql_err, "absolute(Qc)_err":absQc_err, "Qi_err": Qi_err, "electronic_delay_err":delay_err, "resonance_freq_err":fr_err, "prefactor_a_err":a_err, "prefactor_alpha_err":alpha_err}
for details, see:
[1] (not diameter corrected) Jiansong Gao, "The Physics of Superconducting Microwave Resonators" (PhD Thesis), Appendix E, California Institute of Technology, (2008)
[2] (diameter corrected) M. S. Khalil, et al., J. Appl. Phys. 111, 054510 (2012)
[3] (fitting techniques) N. Chernov and C. Lesort, "Least Squares Fitting of Circles", Journal of Mathematical Imaging and Vision 23, 239, (2005)
[4] (further fitting techniques) P. J. Petersan, S. M. Anlage, J. Appl. Phys, 84, 3392 (1998)
the program fits the circle with the algebraic technique described in [3], the rest of the fitting is done with the scipy.optimize least square fitting toolbox
also, check out [5] S. Probst et al., "Efficient and reliable analysis of noisy complex scattering resonator data for superconducting quantum circuits" (in preparation)
'''
if fr is None: fr=f_data[np.argmin(np.absolute(z_data))]
if Ql is None: Ql=1e6
xc, yc, r0 = self._fit_circle(z_data,refine_results=refine_results)
phi0 = -np.arcsin(yc/r0)
theta0 = self._periodic_boundary(phi0+np.pi,np.pi)
z_data_corr = self._center(z_data,complex(xc,yc))
theta0, Ql, fr = self._phase_fit(f_data,z_data_corr,theta0,Ql,fr)
#print("Ql from phasefit is: " + str(Ql))
absQc = Ql/(2.*r0)
complQc = absQc*np.exp(1j*((-1.)*phi0))
Qc = 1./(1./complQc).real # here, taking the real part of (1/complQc) from diameter correction method
Qi_dia_corr = 1./(1./Ql-1./Qc)
Qi_no_corr = 1./(1./Ql-1./absQc)
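# Qi_dia_corr applies the diameter correction of ref. [2] (uses Re(1/Qc));
# Qi_no_corr divides out |Qc| directly, as in ref. [1].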
results = {"Qi_dia_corr":Qi_dia_corr,"Qi_no_corr":Qi_no_corr,"absQc":absQc,"Qc_dia_corr":Qc,"Ql":Ql,"fr":fr,"theta0":theta0,"phi0":phi0}
#calculation of the error
p = [fr,absQc,Ql,phi0]
#chi_square, errors = rt.get_errors(rt.residuals_notch_ideal,f_data,z_data,p)
if calc_errors==True:
chi_square, cov = self._get_cov_fast_notch(f_data,z_data,p)
#chi_square, cov = rt.get_cov(rt.residuals_notch_ideal,f_data,z_data,p)
if cov is not None:
errors = np.sqrt(np.diagonal(cov))
fr_err,absQc_err,Ql_err,phi0_err = errors
#calc Qi with error prop (sum the squares of the variances and covariances)
dQl = 1./((1./Ql-1./absQc)**2*Ql**2)
dabsQc = - 1./((1./Ql-1./absQc)**2*absQc**2)
Qi_no_corr_err = np.sqrt((dQl**2*cov[2][2]) + (dabsQc**2*cov[1][1])+(2*dQl*dabsQc*cov[2][1])) #with correlations
#calc Qi dia corr with error prop
dQl = 1/((1/Ql-np.cos(phi0)/absQc)**2 *Ql**2)
dabsQc = -np.cos(phi0)/((1/Ql-np.cos(phi0)/absQc)**2 *absQc**2)
dphi0 = -np.sin(phi0)/((1/Ql-np.cos(phi0)/absQc)**2 *absQc)
##err1 = ( (dQl*cov[2][2])**2 + (dabsQc*cov[1][1])**2 + (dphi0*cov[3][3])**2 )
err1 = ( (dQl**2*cov[2][2]) + (dabsQc**2*cov[1][1]) + (dphi0**2*cov[3][3]) )
err2 = ( dQl*dabsQc*cov[2][1] + dQl*dphi0*cov[2][3] + dabsQc*dphi0*cov[1][3] )
Qi_dia_corr_err = np.sqrt(err1+2*err2) # including correlations
errors = {"phi0_err":phi0_err, "Ql_err":Ql_err, "absQc_err":absQc_err, "fr_err":fr_err,"chi_square":chi_square,"Qi_no_corr_err":Qi_no_corr_err,"Qi_dia_corr_err": Qi_dia_corr_err}
results.update( errors )
else:
print("WARNING: Error calculation failed!")
else:
#just calc chisquared:
fun2 = lambda x: self._residuals_notch_ideal(x,f_data,z_data)**2
chi_square = 1./float(len(f_data)-len(p)) * (fun2(p)).sum()
errors = {"chi_square":chi_square}
results.update(errors)
return results
def autofit(self,electric_delay=None,fcrop=None):
'''
automatic calibration and fitting
electric_delay: set the electric delay manually
fcrop = (f1,f2) : crop the frequency range used for fitting
'''
if fcrop is None:
self._fid = np.ones(self.f_data.size,dtype=bool)
else:
f1, f2 = fcrop
self._fid = np.logical_and(self.f_data>=f1,self.f_data<=f2)
delay, amp_norm, alpha, fr, Ql, A2, frcal =\
self.do_calibration(self.f_data[self._fid],self.z_data_raw[self._fid],ignoreslope=True,guessdelay=True,fixed_delay=electric_delay)
self.z_data = self.do_normalization(self.f_data,self.z_data_raw,delay,amp_norm,alpha,A2,frcal)
self.fitresults = self.circlefit(self.f_data[self._fid],self.z_data[self._fid],fr,Ql,refine_results=False,calc_errors=True)
self.z_data_sim = A2*(self.f_data-frcal)+self._S21_notch(self.f_data,fr=self.fitresults["fr"],Ql=self.fitresults["Ql"],Qc=self.fitresults["absQc"],phi=self.fitresults["phi0"],a=amp_norm,alpha=alpha,delay=delay)
self.z_data_sim_norm = self._S21_notch(self.f_data,fr=self.fitresults["fr"],Ql=self.fitresults["Ql"],Qc=self.fitresults["absQc"],phi=self.fitresults["phi0"],a=1.0,alpha=0.,delay=0.)
self._delay = delay
def GUIfit(self):
'''
automatic fit with possible user interaction to crop the data and modify the electric delay
f1,f2,delay are determined in the GUI. Then, data is cropped and autofit with delay is performed
'''
#copy data
fmin, fmax = self.f_data.min(), self.f_data.max()
self.autofit()
self.__delay = self._delay
#prepare plot and slider
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, Button
fig, ((ax2,ax0),(ax1,ax3)) = plt.subplots(nrows=2,ncols=2)
plt.suptitle('Normalized data. Use the sliders to improve the fitting if necessary.')
plt.subplots_adjust(left=0.25, bottom=0.25)
l0, = ax0.plot(self.f_data*1e-9,np.absolute(self.z_data))
l1, = ax1.plot(self.f_data*1e-9,np.angle(self.z_data))
l2, = ax2.plot(np.real(self.z_data),np.imag(self.z_data))
l0s, = ax0.plot(self.f_data*1e-9,np.absolute(self.z_data_sim_norm))
l1s, = ax1.plot(self.f_data*1e-9,np.angle(self.z_data_sim_norm))
l2s, = ax2.plot(np.real(self.z_data_sim_norm),np.imag(self.z_data_sim_norm))
ax0.set_xlabel('f (GHz)')
ax1.set_xlabel('f (GHz)')
ax2.set_xlabel('real')
ax0.set_ylabel('amp')
ax1.set_ylabel('phase (rad)')
ax2.set_ylabel('imag')
fr_ann = ax3.annotate('fr = %e Hz +- %e Hz' % (self.fitresults['fr'],self.fitresults['fr_err']),xy=(0.1, 0.8), xycoords='axes fraction')
Ql_ann = ax3.annotate('Ql = %e +- %e' % (self.fitresults['Ql'],self.fitresults['Ql_err']),xy=(0.1, 0.6), xycoords='axes fraction')
Qc_ann = ax3.annotate('Qc = %e +- %e' % (self.fitresults['absQc'],self.fitresults['absQc_err']),xy=(0.1, 0.4), xycoords='axes fraction')
Qi_ann = ax3.annotate('Qi = %e +- %e' % (self.fitresults['Qi_dia_corr'],self.fitresults['Qi_dia_corr_err']),xy=(0.1, 0.2), xycoords='axes fraction')
axcolor = 'lightgoldenrodyellow'
axdelay = plt.axes([0.25, 0.05, 0.65, 0.03], facecolor=axcolor)  # 'axisbg' was removed in Matplotlib 2.0; 'facecolor' is its replacement
axf2 = plt.axes([0.25, 0.1, 0.65, 0.03], facecolor=axcolor)
axf1 = plt.axes([0.25, 0.15, 0.65, 0.03], facecolor=axcolor)
sscale = 10.
sdelay = Slider(axdelay, 'delay', -1., 1., valinit=self.__delay/(sscale*self.__delay),valfmt='%f')
df = (fmax-fmin)*0.05
sf2 = Slider(axf2, 'f2', (fmin-df)*1e-9, (fmax+df)*1e-9, valinit=fmax*1e-9,valfmt='%.10f GHz')
sf1 = Slider(axf1, 'f1', (fmin-df)*1e-9, (fmax+df)*1e-9, valinit=fmin*1e-9,valfmt='%.10f GHz')
def update(val):
self.autofit(electric_delay=sdelay.val*sscale*self.__delay,fcrop=(sf1.val*1e9,sf2.val*1e9))
l0.set_data(self.f_data*1e-9,np.absolute(self.z_data))
l1.set_data(self.f_data*1e-9,np.angle(self.z_data))
l2.set_data(np.real(self.z_data),np.imag(self.z_data))
l0s.set_data(self.f_data[self._fid]*1e-9,np.absolute(self.z_data_sim_norm[self._fid]))
l1s.set_data(self.f_data[self._fid]*1e-9,np.angle(self.z_data_sim_norm[self._fid]))
l2s.set_data(np.real(self.z_data_sim_norm[self._fid]),np.imag(self.z_data_sim_norm[self._fid]))
fr_ann.set_text('fr = %e Hz +- %e Hz' % (self.fitresults['fr'],self.fitresults['fr_err']))
Ql_ann.set_text('Ql = %e +- %e' % (self.fitresults['Ql'],self.fitresults['Ql_err']))
Qc_ann.set_text('|Qc| = %e +- %e' % (self.fitresults['absQc'],self.fitresults['absQc_err']))
Qi_ann.set_text('Qi_dia_corr = %e +- %e' % (self.fitresults['Qi_dia_corr'],self.fitresults['Qi_dia_corr_err']))
fig.canvas.draw_idle()
def btnclicked(event):
self.autofit(electric_delay=None,fcrop=(sf1.val*1e9,sf2.val*1e9))
self.__delay = self._delay
sdelay.reset()
update(event)
sf1.on_changed(update)
sf2.on_changed(update)
sdelay.on_changed(update)
btnax = plt.axes([0.05, 0.1, 0.1, 0.04])
button = Button(btnax, 'auto-delay', color=axcolor, hovercolor='0.975')
button.on_clicked(btnclicked)
plt.show()
plt.close()
def _S21_notch(self,f,fr=10e9,Ql=900,Qc=1000.,phi=0.,a=1.,alpha=0.,delay=.0):
'''
full model for notch type resonances
'''
return a*np.exp(1j*alpha)*np.exp(-2j*np.pi*f*delay)*(1.-Ql/Qc*np.exp(1j*phi)/(1.+2j*Ql*(f-fr)/fr))
def get_single_photon_limit(self,unit='dBm',diacorr=True):
'''
returns the power necessary to maintain
on average one photon in the cavity
unit can be 'dBm' or 'watt'
'''
if self.fitresults!={}:
fr = self.fitresults['fr']
if diacorr:
k_c = 2*np.pi*fr/self.fitresults['Qc_dia_corr']
k_i = 2*np.pi*fr/self.fitresults['Qi_dia_corr']
else:
k_c = 2*np.pi*fr/self.fitresults['absQc']
k_i = 2*np.pi*fr/self.fitresults['Qi_no_corr']
if unit=='dBm':
return Watt2dBm(1./(4.*k_c/(2.*np.pi*hbar*fr*(k_c+k_i)**2)))
elif unit=='watt':
return 1./(4.*k_c/(2.*np.pi*hbar*fr*(k_c+k_i)**2))
else:
warnings.warn('Please perform the fit first',UserWarning)
return None
def get_photons_in_resonator(self,power,unit='dBm',diacorr=True):
'''
returns the average number of photons
for a given input power (default unit is 'dBm')
unit can be 'dBm' or 'watt'
'''
if self.fitresults!={}:
if unit=='dBm':
power = dBm2Watt(power)
fr = self.fitresults['fr']
if diacorr:
k_c = 2*np.pi*fr/self.fitresults['Qc_dia_corr']
k_i = 2*np.pi*fr/self.fitresults['Qi_dia_corr']
else:
k_c = 2*np.pi*fr/self.fitresults['absQc']
k_i = 2*np.pi*fr/self.fitresults['Qi_no_corr']
return 4.*k_c/(2.*np.pi*hbar*fr*(k_c+k_i)**2) * power
else:
warnings.warn('Please perform the fit first',UserWarning)
return None
class transmission_port(circlefit,save_load,plotting):
'''
a class for handling transmission measurements
'''
def __init__(self,f_data=None,z_data_raw=None):
self.porttype = 'transm'
self.fitresults = {}
if f_data is not None:
self.f_data = np.array(f_data)
else:
self.f_data=None
if z_data_raw is not None:
self.z_data_raw = np.array(z_data_raw)
else:
self.z_data_raw = None
def _S21(self,f,fr,Ql,A):
return A**2/(1.+4.*Ql**2*((f-fr)/fr)**2)
def fit(self):
self.ampsqr = (np.absolute(self.z_data_raw))**2
p = [self.f_data[np.argmax(self.ampsqr)],1000.,np.amax(self.ampsqr)]
popt, pcov = spopt.curve_fit(self._S21, self.f_data, self.ampsqr,p)
errors = np.sqrt(np.diag(pcov))
self.fitresults = {'fr':popt[0],'fr_err':errors[0],'Ql':popt[1],'Ql_err':errors[1],'Ampsqr':popt[2],'Ampsqr_err':errors[2]}
class resonator(object):
'''
Universal resonator analysis class
It can handle different kinds of ports and asymmetric resonators.
'''
def __init__(self, ports = {}, comment = None):
'''
initializes the resonator class object
ports (dictionary {key:value}): specify the name and properties of the coupling ports
e.g. ports = {'1':'direct', '2':'notch'}
comment: add a comment
'''
self.comment = comment
self.port = {}
self.transm = {}
if len(ports) > 0:
for key, pname in iter(ports.items()):
if pname=='direct':
self.port.update({key:reflection_port()})
elif pname=='notch':
self.port.update({key:notch_port()})
else:
warnings.warn("Undefined input type! Use 'direct' or 'notch'.", SyntaxWarning)
if len(self.port) == 0: warnings.warn("Resonator has no coupling ports!", UserWarning)
def add_port(self,key,pname):
if pname=='direct':
self.port.update({key:reflection_port()})
elif pname=='notch':
self.port.update({key:notch_port()})
else:
warnings.warn("Undefined input type! Use 'direct' or 'notch'.", SyntaxWarning)
if len(self.port) == 0: warnings.warn("Resonator has no coupling ports!", UserWarning)
def delete_port(self,key):
del self.port[key]
if len(self.port) == 0: warnings.warn("Resonator has no coupling ports!", UserWarning)
def get_Qi(self):
'''
based on the number of ports and the corresponding measurements
it calculates the internal losses
'''
pass
def get_single_photon_limit(self,port):
'''
returns the amount of power necessary to maintain one photon
on average in the cavity
'''
pass
def get_photons_in_resonator(self,power,port):
'''
returns the average number of photons
for a given power
'''
pass
def add_transm_meas(self,port1, port2):
'''
input: port1
output: port2
adds a transmission measurement
connecting two direct ports S21
'''
key = port1 + " -> " + port2
self.port.update({key:transm()})  # NOTE: 'transm' is undefined in this module; presumably transmission_port() was intended
class batch_processing(object):
'''
A class for batch processing of resonator data as a function of another variable
Typical applications are power scans, magnetic field scans etc.
'''
def __init__(self,porttype):
'''
porttype = 'notch', 'direct', 'transm'
results is an array of dictionaries containing the fitresults
'''
self.porttype = porttype
self.results = []
def autofit(self,cal_dataslice = 0):
'''
fits all data
cal_dataslice: choose the scattering data which should be used for calibration
of the amplitude and phase, default = 0 (first)
'''
pass
class coupled_resonators(batch_processing):
'''
A class for fitting a resonator coupled to a second one
'''
def __init__(self,porttype):
self.porttype = porttype
self.results = []
#def GUIfit(porttype,f_data,z_data_raw):
# '''
# GUI-based fitting process enabling cutting of the data and manually setting the delay
# It employs the Matplotlib widgets
# return f1, f2 and delay, which should be employed for the real fitting
# '''
# if porttype=='direct':
# p = reflection_port(f_data=f_data,z_data_raw=z_data_raw)
# elif porttype =='notch':
# p = notch_port(f_data=f_data,z_data_raw=z_data_raw)
# else:
# warnings.warn('Not supported!')
# return None
# import matplotlib.pyplot as plt
# from matplotlib.widgets import Slider, Button, RadioButtons
# #plt.style.use('ggplot')
# fig, axes = plt.subplots(nrows=2,ncols=2)
#
# return f1,f2,delay
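# Hedged usage sketch (added for illustration; not part of the original module).
# It synthesizes an ideal notch trace with the _S21_notch model and recovers
# the parameters with autofit(); all parameter values below are arbitrary.
if __name__ == '__main__':
    f = np.linspace(4.99e9, 5.01e9, 2001)
    port = notch_port()
    port.f_data = f
    port.z_data_raw = port._S21_notch(f, fr=5e9, Ql=8e3, Qc=1e4, phi=0.1, a=0.9, alpha=0.2, delay=1e-9)
    port.autofit()
    print(port.fitresults['fr'], port.fitresults['Ql'], port.fitresults['Qi_dia_corr'])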
| be6454dcd7dffe1eb46f400b2579bc4c68018ce4 | 25 | py | Python | event.grid-client.python/main/__init__.py | enjector/enjector-event.grid | 7dfec2a1e155bf6b3ba25b1d6b133a3237b7ba14 | ["Apache-2.0"] | 11 stars (2020-08-08 to 2020-08-11) | forks repo: enjector/enjector-event.grid-server |
from .eventgrid import *
| be8064b4b41f111bfa023afcd8b3680ca3cdc30b | 167 | py | Python | sw/control/__init__.py | christopherco/moabian | 29b623d60212ba4daa18e3ca9aeed364390533e6 | ["MIT"] |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
from .debug import *
from .device import *
from .timers import *
from .perfcounters import *
| beb4e0a933714af799118ed326bf5e54ee4ed540 | 19,827 | py | Python | lingvo/core/spectrum_augmenter_on_device_test.py | Singed-jj/lingvo | a2a4ac8bd835ffc2f95fc38ee3e9bc17c30fcc56 | ["Apache-2.0"] |
# Lint as: python3
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for spectrum augmenter layer."""
import lingvo.compat as tf
from lingvo.core import spectrum_augmenter
from lingvo.core import spectrum_augmenter_on_device
from lingvo.core import test_utils
import numpy as np
from six.moves import range
class SpectrumAugmenterTest(test_utils.TestCase):
def testSpectrumAugmenterWithTimeMask(self):
with self.session(use_gpu=False, graph=tf.Graph()):
tf.random.set_seed(127)
batch_size = 5
inputs = tf.ones([batch_size, 20, 2, 2], dtype=tf.float32)
paddings = []
for i in range(batch_size):
paddings.append(
tf.concat([tf.zeros([1, i + 12]),
tf.ones([1, 8 - i])], axis=1))
paddings = tf.concat(paddings, axis=0)
hs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
p.freq_mask_max_bins = 0
p.time_mask_max_frames = 5
p.time_mask_count = 2
p.time_mask_max_ratio = 1.0
p.random_seed = 23456
specaug_layer = p.Instantiate()
h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
hs.append(h)
layer_output, layer_output_on_device = self.evaluate(hs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterDynamicSizeTimeMask(self):
with self.session(use_gpu=False, graph=tf.Graph()):
tf.random.set_seed(127)
batch_size = 3
inputs = tf.ones([batch_size, 20, 2, 2], dtype=tf.float32)
paddings = []
for i in range(batch_size):
paddings.append(
tf.concat([tf.zeros([1, 8 * i + 3]),
tf.ones([1, 17 - 8 * i])],
axis=1))
paddings = tf.concat(paddings, axis=0)
hs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
p.freq_mask_max_bins = 0
p.time_mask_max_ratio = 0.4
p.time_mask_count = 1
p.use_dynamic_time_mask_max_frames = True
p.random_seed = 12345
specaug_layer = p.Instantiate()
h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
hs.append(h)
layer_output, layer_output_on_device = self.evaluate(hs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterDynamicMultiplicityTimeMask(self):
with self.session(use_gpu=False, graph=tf.Graph()):
tf.random.set_seed(127)
batch_size = 4
inputs = tf.ones([batch_size, 22, 2, 2], dtype=tf.float32)
paddings = []
for i in range(batch_size):
paddings.append(
tf.concat([tf.zeros([1, 5 * i + 5]),
tf.ones([1, 16 - 5 * i])],
axis=1))
paddings = tf.concat(paddings, axis=0)
hs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
p.freq_mask_max_bins = 0
p.time_mask_max_frames = 5
p.time_mask_count = 10
p.time_masks_per_frame = 0.2
p.random_seed = 67890
specaug_layer = p.Instantiate()
h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
hs.append(h)
layer_output, layer_output_on_device = self.evaluate(hs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterDynamicSizeAndMultiplicityTimeMask(self):
with self.session(use_gpu=False, graph=tf.Graph()):
tf.random.set_seed(127)
batch_size = 4
inputs = tf.ones([batch_size, 22, 2, 2], dtype=tf.float32)
paddings = []
for i in range(batch_size):
paddings.append(
tf.concat([tf.zeros([1, 5 * i + 5]),
tf.ones([1, 16 - 5 * i])],
axis=1))
paddings = tf.concat(paddings, axis=0)
hs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
p.freq_mask_max_bins = 0
p.time_mask_max_frames = 5
p.time_mask_count = 10
p.time_masks_per_frame = 0.2
p.time_mask_max_ratio = 0.4
p.use_dynamic_time_mask_max_frames = True
p.random_seed = 67890
specaug_layer = p.Instantiate()
h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
hs.append(h)
layer_output, layer_output_on_device = self.evaluate(hs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterWithFrequencyMask(self):
with self.session(use_gpu=False, graph=tf.Graph()):
tf.random.set_seed(1234)
inputs = tf.ones([3, 5, 10, 1], dtype=tf.float32)
paddings = tf.zeros([3, 5])
hs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
p.freq_mask_max_bins = 6
p.freq_mask_count = 2
p.time_mask_max_frames = 0
p.random_seed = 34567
specaug_layer = p.Instantiate()
h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
hs.append(h)
layer_output, layer_output_on_device = self.evaluate(hs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterWarpMatrixConstructor(self):
with self.session(use_gpu=False, graph=tf.Graph()):
inputs = tf.broadcast_to(tf.cast(tf.range(10), dtype=tf.float32), (4, 10))
origin = tf.cast([2, 4, 4, 5], dtype=tf.float32)
destination = tf.cast([3, 2, 6, 8], dtype=tf.float32)
choose_range = tf.cast([4, 8, 8, 10], dtype=tf.float32)
outputs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
specaug_layer = p.Instantiate()
warp_matrix = specaug_layer._ConstructWarpMatrix(
batch_size=4,
matrix_size=10,
origin=origin,
destination=destination,
choose_range=choose_range,
dtype=tf.float32)
output = tf.einsum('bij,bj->bi', warp_matrix, inputs)
outputs.append(output)
layer_output, layer_output_on_device = self.evaluate(outputs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterWithTimeWarping(self):
with self.session(use_gpu=False, graph=tf.Graph()):
tf.random.set_seed(1234)
inputs = tf.broadcast_to(tf.cast(tf.range(10), dtype=tf.float32), (3, 10))
inputs = tf.expand_dims(tf.expand_dims(inputs, -1), -1)
paddings = []
for i in range(3):
paddings.append(
tf.concat([tf.zeros([1, i + 7]),
tf.ones([1, 3 - i])], axis=1))
paddings = tf.concat(paddings, axis=0)
hs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
p.freq_mask_max_bins = 0
p.time_mask_max_frames = 0
p.time_warp_max_frames = 8
p.time_warp_max_ratio = 1.0
p.time_warp_bound = 'static'
p.random_seed = 34567
specaug_layer = p.Instantiate()
h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
hs.append(h)
layer_output, layer_output_on_device = self.evaluate(hs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterWithDynamicTimeWarping(self):
with self.session(use_gpu=False, graph=tf.Graph()):
tf.random.set_seed(1234)
inputs = tf.broadcast_to(tf.cast(tf.range(10), dtype=tf.float32), (3, 10))
inputs = tf.expand_dims(tf.expand_dims(inputs, -1), -1)
paddings = []
for i in range(3):
paddings.append(
tf.concat([tf.zeros([1, 2 * i + 5]),
tf.ones([1, 5 - 2 * i])],
axis=1))
paddings = tf.concat(paddings, axis=0)
hs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
p.freq_mask_max_bins = 0
p.time_mask_max_frames = 0
p.time_warp_max_ratio = 0.5
p.time_warp_bound = 'dynamic'
p.random_seed = 34567
specaug_layer = p.Instantiate()
h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
hs.append(h)
layer_output, layer_output_on_device = self.evaluate(hs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterUnstacking(self):
with self.session(use_gpu=False, graph=tf.Graph()):
tf.random.set_seed(1234)
inputs = tf.ones([3, 5, 10, 1], dtype=tf.float32)
paddings = tf.zeros([3, 5])
hs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
p.unstack = True
p.stack_height = 2
p.freq_mask_max_bins = 5
p.time_mask_max_frames = 8
p.random_seed = 12345
specaug_layer = p.Instantiate()
h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
hs.append(h)
layer_output, layer_output_on_device = self.evaluate(hs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterWithPerDomainPolicyFreqMask(self):
with self.session(use_gpu=False, graph=tf.Graph()):
tf.random.set_seed(1234)
inputs = tf.ones([6, 5, 4, 2], dtype=tf.float32)
input_domain_ids = tf.constant(
[[1] * 5, [2] * 5, [0] * 5, [2] * 5, [0] * 5, [1] * 5],
dtype=tf.float32)
paddings = tf.zeros([3, 5])
hs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
p.domain_ids = [0, 1, 2]
p.freq_mask_max_bins = [0, 3, 8]
p.time_mask_max_frames = 0
p.random_seed = 1234
specaug_layer = p.Instantiate()
h, _ = specaug_layer.FPropDefaultTheta(
inputs, paddings, domain_ids=input_domain_ids)
hs.append(h)
layer_output, layer_output_on_device = self.evaluate(hs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterNoisify(self):
with self.session(use_gpu=False, graph=tf.Graph()):
tf.random.set_seed(127)
batch_size = 2
inputs = tf.ones([batch_size, 20, 2, 2], dtype=tf.float32)
paddings = []
for i in range(batch_size):
paddings.append(
tf.concat([tf.zeros([1, 8 * i + 3]),
tf.ones([1, 17 - 8 * i])],
axis=1))
paddings = tf.concat(paddings, axis=0)
hs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
p.freq_mask_max_bins = 0
p.time_mask_max_ratio = 0.4
p.time_mask_count = 1
p.use_dynamic_time_mask_max_frames = True
p.use_noise = True
p.gaussian_noise = False
p.random_seed = 12345
specaug_layer = p.Instantiate()
h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
hs.append(h)
layer_output, layer_output_on_device = self.evaluate(hs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterGaussianNoisify(self):
with self.session(use_gpu=False, graph=tf.Graph()):
tf.random.set_seed(127)
batch_size = 2
inputs = tf.ones([batch_size, 20, 2, 2], dtype=tf.float32)
paddings = []
for i in range(batch_size):
paddings.append(
tf.concat([tf.zeros([1, 8 * i + 3]),
tf.ones([1, 17 - 8 * i])],
axis=1))
paddings = tf.concat(paddings, axis=0)
hs = []
for p in [
spectrum_augmenter.SpectrumAugmenter.Params(),
spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
]:
p.name = 'specAug_layers'
p.freq_mask_max_bins = 0
p.time_mask_max_ratio = 0.4
p.time_mask_count = 1
p.use_dynamic_time_mask_max_frames = True
p.use_noise = True
p.gaussian_noise = True
p.random_seed = 12345
specaug_layer = p.Instantiate()
h, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
hs.append(h)
layer_output, layer_output_on_device = self.evaluate(hs)
self.assertAllClose(layer_output, layer_output_on_device)
def testSpectrumAugmenterWithStatelessRandomOps(self):
with self.session(use_gpu=False, graph=tf.Graph()):
batch_size = 5
inputs1 = tf.random.uniform(
shape=[batch_size, 20, 2, 2], minval=0, maxval=1, dtype=tf.float32)
inputs2 = tf.random.uniform(
shape=[batch_size, 20, 2, 2], minval=0, maxval=1, dtype=tf.float32)
paddings = []
for i in range(batch_size):
paddings.append(
tf.concat([tf.zeros([1, i + 12]),
tf.ones([1, 8 - i])], axis=1))
paddings = tf.concat(paddings, axis=0)
p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
p.name = 'specAug_layers'
p.freq_mask_count = 1
p.freq_mask_max_bins = 1
p.time_mask_max_frames = 5
p.time_mask_count = 2
p.time_mask_max_ratio = 1.0
p.use_input_dependent_random_seed = True
specaug_layer = p.Instantiate()
h1, _ = specaug_layer.FPropDefaultTheta(inputs1, paddings)
h2, _ = specaug_layer.FPropDefaultTheta(inputs2, paddings)
actual_layer_output1, actual_layer_output2 = self.evaluate([h1, h2])
self.assertAllEqual(
np.shape(actual_layer_output1), np.array([5, 20, 2, 2]))
self.assertNotAllEqual(actual_layer_output1, actual_layer_output2)
def testGraphContainsOnDeviceOps(self):
"""Checks that einsum and stateful random ops are not used on-device."""
model_graph = tf.Graph()
with model_graph.as_default():
batch_size = 5
inputs = tf.random.stateless_uniform(
shape=[batch_size, 20, 2, 2],
minval=0,
maxval=1,
seed=tf.constant([123, 123]),
dtype=tf.float32)
paddings = []
for i in range(batch_size):
paddings.append(
tf.concat([tf.zeros([1, i + 12]),
tf.ones([1, 8 - i])], axis=1))
paddings = tf.concat(paddings, axis=0)
p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
p.name = 'specAug_layers'
p.freq_mask_count = 1
p.freq_mask_max_bins = 1
p.time_mask_max_frames = 5
p.time_mask_count = 2
p.use_noise = True
p.gaussian_noise = True
p.time_mask_max_ratio = 1.0
p.use_input_dependent_random_seed = True
specaug_layer = p.Instantiate()
_, _ = specaug_layer.FPropDefaultTheta(inputs, paddings)
# A list of ops that are not compatible with on-device training.
unsupported_on_device_nodes = [
'RandomUniform', 'RandomStandardNormal', 'Einsum'
]
for node in model_graph.as_graph_def().node:
self.assertNotIn(node.op, unsupported_on_device_nodes)
def testEinsumReplacementBBmBm(self):
with self.session(use_gpu=False, graph=tf.Graph()):
a = tf.random.uniform(shape=[20], minval=0, maxval=1, dtype=tf.float32)
b = tf.random.uniform(
shape=[20, 10], minval=0, maxval=1, dtype=tf.float32)
einsum = tf.einsum('b,bm->bm', a, b)
p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
p.name = 'specAug_layers'
specaug_layer = p.Instantiate()
replacement = specaug_layer.EinsumBBmBm(a, b)
einsum, replacement = self.evaluate([einsum, replacement])
self.assertAllClose(einsum, replacement)
def testEinsumReplacementBxycByBxyc(self):
with self.session(use_gpu=False, graph=tf.Graph()):
a = tf.random.uniform(
shape=[20, 5, 7, 4], minval=0, maxval=1, dtype=tf.float32)
b = tf.random.uniform(shape=[20, 7], minval=0, maxval=1, dtype=tf.float32)
einsum = tf.einsum('bxyc,by->bxyc', a, b)
p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
p.name = 'specAug_layers'
specaug_layer = p.Instantiate()
replacement = specaug_layer.EinsumBxycByBxyc(a, b)
einsum, replacement = self.evaluate([einsum, replacement])
self.assertAllClose(einsum, replacement)
def testEinsumReplacementBxycBxBxyc(self):
with self.session(use_gpu=False, graph=tf.Graph()):
a = tf.random.uniform(
shape=[20, 5, 7, 4], minval=0, maxval=1, dtype=tf.float32)
b = tf.random.uniform(shape=[20, 5], minval=0, maxval=1, dtype=tf.float32)
einsum = tf.einsum('bxyc,bx->bxyc', a, b)
p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
p.name = 'specAug_layers'
specaug_layer = p.Instantiate()
replacement = specaug_layer.EinsumBxycBxBxyc(a, b)
einsum, replacement = self.evaluate([einsum, replacement])
self.assertAllClose(einsum, replacement)
def testEinsumReplacementBxyBxBxy(self):
with self.session(use_gpu=False, graph=tf.Graph()):
a = tf.random.uniform(
shape=[20, 7, 4], minval=0, maxval=1, dtype=tf.float32)
b = tf.random.uniform(shape=[20, 7], minval=0, maxval=1, dtype=tf.float32)
einsum = tf.einsum('bxy,bx->bxy', a, b)
p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
p.name = 'specAug_layers'
specaug_layer = p.Instantiate()
replacement = specaug_layer.EinsumBxyBxBxy(a, b)
einsum, replacement = self.evaluate([einsum, replacement])
self.assertAllClose(einsum, replacement)
def testEinsumReplacementBxycBzxBzyc(self):
with self.session(use_gpu=False, graph=tf.Graph()):
a = tf.random.uniform(
shape=[20, 7, 4, 3], minval=0, maxval=1, dtype=tf.float32)
b = tf.random.uniform(
shape=[20, 5, 7], minval=0, maxval=1, dtype=tf.float32)
einsum = tf.einsum('bxyc,bzx->bzyc', a, b)
p = spectrum_augmenter_on_device.SpectrumAugmenterOnDevice.Params()
p.name = 'specAug_layers'
specaug_layer = p.Instantiate()
replacement = specaug_layer.EinsumBxycBzxBzyc(a, b)
einsum, replacement = self.evaluate([einsum, replacement])
self.assertAllClose(einsum, replacement)
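def _einsum_replacement_demo():
    """Hedged illustration (added; not in the original file): the 'b,bm->bm'
    contraction checked in testEinsumReplacementBBmBm equals a broadcasted
    multiply, which is the kind of Einsum-free rewrite used on-device."""
    a = tf.random.uniform(shape=[20], minval=0, maxval=1, dtype=tf.float32)
    b = tf.random.uniform(shape=[20, 10], minval=0, maxval=1, dtype=tf.float32)
    einsum = tf.einsum('b,bm->bm', a, b)
    broadcast = tf.expand_dims(a, -1) * b  # same result without an Einsum op
    return einsum, broadcast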
if __name__ == '__main__':
tf.test.main()
| fe29e9d4848eec29299ab01c971a80b123d5364f | 306 | py | Python | src/tests/test_trivial.py | ndejong/pyvboxmanage | 6cb49546782ae97f177e7035982b1dc86b8f61db | ["BSD-2-Clause"] | 1 star (2020-12-28) |
import pytest
from pyvboxmanage import __author__
from pyvboxmanage import __version__
from pyvboxmanage import __title__
def test_author_exist():
assert __author__ is not None
def test_version_exist():
assert __version__ is not None
def test_title_exist():
assert __title__ is not None
| fe34b1165175e9ca4d32cddd690f0443186dcade | 20 | py | Python | dataloader/dataset/__init__.py | sajith-rahim/papyrus | 1f027274670b6492caaeb09e6ad6f80d2ebff390 | ["Apache-2.0"] | 5 stars (2019-03-24 to 2021-08-10) | 1 fork (2021-08-10) |
from .mnist import *
| fe5310de7ff402e5af5ced94f870127483d6d509 | 36 | py | Python | testing/__init__.py | michelebersani/ComputationalMathematics | ddc75251b01ed6b4f6f70d9aaf135c93f9c624f1 | ["MIT"] | 1 star (2021-04-23) |
from .multiple_runs import multi_run
| fe5af8266a2106adcbb7b93247d388aa9a95f346 | 114 | py | Python | profiles/utils.py | abdellatifLabr/MyStore | 6a1db004d7372c236be72077aa55260927a46135 | ["MIT"] |
import uuid
def build_avatar_path(instance, filename):
return f'img/profile/avatar/{uuid.uuid4()}_{filename}'
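# Hedged usage note (added for illustration): the (instance, filename)
# signature matches what an upload-path callable (presumably Django's
# upload_to) receives; `instance` is unused here, so any value works, e.g.
#   build_avatar_path(None, 'me.png') -> 'img/profile/avatar/<uuid4>_me.png'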
| fe612c49f6a9bee666e6e76365e814bbd645d44e | 418 | py | Python | tests/conftest.py | vintesk/gpwebpay | 70f790e82831baaeb807cf4703191710e51b9b23 | ["MIT"] | 5 stars (2018-02-01 to 2021-05-31) | issues repo: filias/gpwebpay, 61 issues (2020-01-09 to 2022-01-02) |
import base64
import pytest
from gpwebpay.config import configuration
from gpwebpay.gpwebpay import GpwebpayClient
@pytest.fixture()
def gateway_client():
return GpwebpayClient()
@pytest.fixture()
def private_key() -> bytes:
return base64.b64decode(configuration.GPWEBPAY_MERCHANT_PRIVATE_KEY)
@pytest.fixture()
def public_key() -> bytes:
return base64.b64decode(configuration.GPWEBPAY_PUBLIC_KEY)
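# Hedged example (added for illustration; tests would normally live outside
# conftest.py): a consumer of these fixtures could verify the decoded keys.
def example_key_fixture_usage(private_key: bytes, public_key: bytes) -> bool:
    # Both fixtures base64-decode configuration values, so bytes are expected.
    return isinstance(private_key, bytes) and isinstance(public_key, bytes)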
| 2287ae37ae4f1a5ae7c8e582a10c93cb91f42725 | 109 | py | Python | 506-Packaging-Lambda-Code-in-a-Container-Image/app.py | AWSCookbook/Serverless | ad39607b2901774b99056505e0ed03386c10ef7e | ["MIT"] | 5 stars (2021-12-16 to 2022-02-10) | 4 forks (2021-11-25 to 2022-02-25) |
import sys
def handler(event, context):
return 'Hello from the AWS Cookbook ' + sys.version + '!'
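# Hedged local check (added; not in the original file): the handler ignores
# its arguments' contents, so it can be exercised directly without Lambda.
if __name__ == '__main__':
    print(handler(event={}, context=None))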
| 2292f2acae4e80e79020c793179fecbd620a995c | 1,352 | py | Python | python/msc/entry/15.py | gerritjvv/optimization_algorithms | eab2e8fff39eeab8d9be45af3dae3be1a62be3ba | ["MIT"] |
import numpy as np
import sympy as s
def f_a(a, b, x):
return a * np.sin(2 * x) + b * np.cos(2 * x) - (x / 4.0) * np.cos(2 * x)
def f_da(a, b):
x = s.Symbol('x')
f = a * s.sin(2 * x) + b * s.cos(2 * x) - (x / 4.0) * s.cos(2 * x)
f_prime = f.diff(x)
print(f_prime)
l = s.lambdify(x, f_prime)
return l
def f_b(a, b, x):
return a * np.sin(2 * x) + b * np.cos(2 * x) - (x / 4.0) * np.sin(2 * x)
def f_db(a, b):
x = s.Symbol('x')
f = a * s.sin(2 * x) + b * s.cos(2 * x) - (x / 4.0) * s.sin(2 * x)
f_prime = f.diff(x)
print(f_prime)
l = s.lambdify(x, f_prime)
return l
def f_c(a, b, x):
return a * np.sin(2 * x) + b * np.cos(2 * x)
def f_dc(a, b):
x = s.Symbol('x')
f = a * s.sin(2 * x) + b * s.cos(2 * x)
f_prime = f.diff(x)
print(f_prime)
l = s.lambdify(x, f_prime)
return l
def f_d(a, b, x):
return (a + b * x) * np.e ** (-2 * x) - (x / 4.0) * np.cos(2 * x)
def f_dd(a, b):
x = s.Symbol('x')
f = (a + b * x) * s.exp(-2 * x) - (x / 4.0) * s.cos(2 * x)
f_prime = f.diff(x)
print(f_prime)
l = s.lambdify(x, f_prime)
return l
# print(f_a(2, 2, 0))
# # 1
# print(f_b(2, 2, 0))
# # 1
# print(f_c(2, 2, 0))
# # 1
# print(f_d(2, 2, 0))
# 1
print(f_da(1, 1)(0))
print(f_db(1, 1)(0))
print(f_dc(1, 1)(0))
print(f_dd(1, 1)(0))
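# Hedged sanity check (added for illustration): each symbolic derivative above
# can be compared against a central finite difference of the matching f_*.
def _central_diff(func, x, h=1e-6):
    return (func(x + h) - func(x - h)) / (2.0 * h)

print(abs(f_da(1, 1)(0) - _central_diff(lambda x: f_a(1, 1, x), 0.0)))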
| 22c5af7cb8e9126224a9facdae968a7c93c8d55b | 9,512 | py | Python | ml/rl/preprocessing/batch_preprocessor.py | brettkoonce/ReAgent | dcaa16e0bdc5e1cecf816a6683e8909a9859855d | ["BSD-3-Clause"] | 2 stars (2021-05-23 to 2021-06-17) | 2 forks (2021-01-06 to 2021-06-24) |
#!/usr/bin/env python3
from typing import Tuple, Union, cast
import torch
from ml.rl import types as rlt
from ml.rl.preprocessing.normalization import get_num_output_features
from ml.rl.preprocessing.preprocessor import Preprocessor
class BatchPreprocessor:
def __call__(self, batch: rlt.RawTrainingBatch) -> rlt.PreprocessedTrainingBatch:
raise NotImplementedError()
class DiscreteDqnBatchPreprocessor(BatchPreprocessor):
def __init__(self, state_preprocessor: Preprocessor):
self.state_preprocessor = state_preprocessor
def __call__(self, batch: rlt.RawTrainingBatch) -> rlt.PreprocessedTrainingBatch:
training_input = batch.training_input
assert isinstance(
training_input, (rlt.RawDiscreteDqnInput, rlt.RawMemoryNetworkInput)
), "Wrong Type: {}".format(str(type(training_input)))
preprocessed_state = self.state_preprocessor(
training_input.state.float_features.value,
training_input.state.float_features.presence,
)
preprocessed_next_state = self.state_preprocessor(
training_input.next_state.float_features.value,
training_input.next_state.float_features.presence,
)
new_training_input = training_input.preprocess_tensors(
state=preprocessed_state, next_state=preprocessed_next_state
)
return batch.preprocess(new_training_input)
class SequentialDiscreteDqnBatchPreprocessor(DiscreteDqnBatchPreprocessor):
def __init__(self, state_preprocessor: Preprocessor, action_dim: int, seq_len: int):
super().__init__(state_preprocessor)
self.state_dim = get_num_output_features(
state_preprocessor.normalization_parameters
)
self.seq_len = seq_len
self.action_dim = action_dim
def __call__(self, batch: rlt.RawTrainingBatch) -> rlt.PreprocessedTrainingBatch:
preprocessed_batch = super().__call__(batch)
training_input = preprocessed_batch.training_input
assert isinstance(training_input, rlt.PreprocessedMemoryNetworkInput)
preprocessed_batch = preprocessed_batch._replace(
training_input=training_input._replace(
state=rlt.PreprocessedFeatureVector(
float_features=training_input.state.float_features.reshape(
-1, self.seq_len, self.state_dim
)
),
action=training_input.action.reshape(-1, self.seq_len, self.action_dim),
next_state=rlt.PreprocessedFeatureVector(
float_features=training_input.next_state.float_features.reshape(
-1, self.seq_len, self.state_dim
)
),
reward=training_input.reward.reshape(-1, self.seq_len),
not_terminal=preprocessed_batch.training_input.not_terminal.reshape(
-1, self.seq_len
),
)
)
return preprocessed_batch
class ParametricDqnBatchPreprocessor(BatchPreprocessor):
def __init__(
self, state_preprocessor: Preprocessor, action_preprocessor: Preprocessor
):
self.state_preprocessor = state_preprocessor
self.action_preprocessor = action_preprocessor
def __call__(self, batch: rlt.RawTrainingBatch) -> rlt.PreprocessedTrainingBatch:
training_input = batch.training_input
assert isinstance(
training_input, (rlt.RawParametricDqnInput, rlt.RawMemoryNetworkInput)
), "Wrong Type: {}".format(str(type(training_input)))
is_memory_network = isinstance(training_input, rlt.RawMemoryNetworkInput)
preprocessed_state = self.state_preprocessor(
training_input.state.float_features.value,
training_input.state.float_features.presence,
)
preprocessed_next_state = self.state_preprocessor(
training_input.next_state.float_features.value,
training_input.next_state.float_features.presence,
)
assert isinstance(training_input.action, rlt.FeatureVector)
preprocessed_action = self.action_preprocessor(
training_input.action.float_features.value,
training_input.action.float_features.presence,
)
if is_memory_network:
assert isinstance(training_input, rlt.RawMemoryNetworkInput)
return batch.preprocess(
training_input=training_input.preprocess_tensors(
state=preprocessed_state,
next_state=preprocessed_next_state,
action=preprocessed_action,
)
)
else:
assert isinstance(training_input, rlt.RawParametricDqnInput)
preprocessed_tiled_next_state = self.state_preprocessor(
training_input.tiled_next_state.float_features.value,
training_input.tiled_next_state.float_features.presence,
)
preprocessed_next_action = self.action_preprocessor(
training_input.next_action.float_features.value,
training_input.next_action.float_features.presence,
)
preprocessed_possible_actions = self.action_preprocessor(
training_input.possible_actions.float_features.value,
training_input.possible_actions.float_features.presence,
)
preprocessed_possible_next_actions = self.action_preprocessor(
training_input.possible_next_actions.float_features.value,
training_input.possible_next_actions.float_features.presence,
)
return batch.preprocess(
training_input=training_input.preprocess_tensors(
state=preprocessed_state,
next_state=preprocessed_next_state,
action=preprocessed_action,
next_action=preprocessed_next_action,
possible_actions=preprocessed_possible_actions,
possible_next_actions=preprocessed_possible_next_actions,
tiled_next_state=preprocessed_tiled_next_state,
)
)
class SequentialParametricDqnBatchPreprocessor(ParametricDqnBatchPreprocessor):
def __init__(
self,
state_preprocessor: Preprocessor,
action_preprocessor: Preprocessor,
seq_len: int,
):
super().__init__(state_preprocessor, action_preprocessor)
self.state_dim = get_num_output_features(
state_preprocessor.normalization_parameters
)
self.action_dim = get_num_output_features(
action_preprocessor.normalization_parameters
)
self.seq_len = seq_len
def __call__(self, batch: rlt.RawTrainingBatch) -> rlt.PreprocessedTrainingBatch:
preprocessed_batch = super().__call__(batch)
training_input = preprocessed_batch.training_input
assert isinstance(training_input, rlt.PreprocessedMemoryNetworkInput)
preprocessed_batch = preprocessed_batch._replace(
training_input=training_input._replace(
state=rlt.PreprocessedFeatureVector(
float_features=training_input.state.float_features.reshape(
-1, self.seq_len, self.state_dim
)
),
action=training_input.action.reshape(-1, self.seq_len, self.action_dim),
next_state=rlt.PreprocessedFeatureVector(
float_features=training_input.next_state.float_features.reshape(
-1, self.seq_len, self.state_dim
)
),
reward=training_input.reward.reshape(-1, self.seq_len),
not_terminal=training_input.not_terminal.reshape(-1, self.seq_len),
)
)
return preprocessed_batch
class PolicyNetworkBatchPreprocessor(BatchPreprocessor):
def __init__(
self, state_preprocessor: Preprocessor, action_preprocessor: Preprocessor
):
self.state_preprocessor = state_preprocessor
self.action_preprocessor = action_preprocessor
def __call__(self, batch: rlt.RawTrainingBatch) -> rlt.PreprocessedTrainingBatch:
training_input = batch.training_input
assert isinstance(training_input, rlt.RawPolicyNetworkInput)
preprocessed_state = self.state_preprocessor(
training_input.state.float_features.value,
training_input.state.float_features.presence,
)
preprocessed_next_state = self.state_preprocessor(
training_input.next_state.float_features.value,
training_input.next_state.float_features.presence,
)
preprocessed_action = self.action_preprocessor(
training_input.action.float_features.value,
training_input.action.float_features.presence,
)
preprocessed_next_action = self.action_preprocessor(
training_input.next_action.float_features.value,
training_input.next_action.float_features.presence,
)
return batch.preprocess(
training_input=training_input.preprocess_tensors(
state=preprocessed_state,
next_state=preprocessed_next_state,
action=preprocessed_action,
next_action=preprocessed_next_action,
)
)
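# Hedged wiring note (added; not in the original file). Names below are
# illustrative, not confirmed by this module:
#   batch_preprocessor = DiscreteDqnBatchPreprocessor(state_preprocessor)
#   preprocessed = batch_preprocessor(raw_training_batch)
# i.e. each preprocessor takes an rlt.RawTrainingBatch and returns an
# rlt.PreprocessedTrainingBatch with normalized float features.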
| fe1a9b988209ca845015a3c8edfd39864c966052 | 116 | py | Python | tests/test.py | cwoodall/doppler-gestures-py | 625d506452bdc35412f129a7c746ecb8fe0dee26 | ["MIT"] | 4 stars (2015-06-28 to 2018-08-19) | 17 issues (2015-03-22 to 2021-02-27) | 5 forks (2015-03-25 to 2018-08-19) |
import nose
def test_nose_working():
"""
Test that the nose runner is working.
"""
assert True
| a3f144f65ecff9cc551fe7b3e7cf87a4a9dd45a0 | 104 | py | Python | malanka/websockets.py | alex-oleshkevich/malanka | d46207fd889f5d2cd3888ac04ea980a963a7559f | ["MIT"] | 1 star (2021-08-01) |
from starlette.websockets import WebSocket, WebSocketClose, WebSocketDisconnect, WebSocketState # noqa
| 431710dc46aa23cd0e91b299dba1e232c5bfc722 | 187 | py | Python | domain_adaptation/feature_based/pca.py | eddardd/CrossDomainFaultDetection | 83dd24727a8b35cda2549b40166beaf740e14c98 | ["MIT"] | 3 stars (2021-08-30 to 2021-12-22) | issues repo: eddardd/CrossDomainFaultDiagnosis, 1 issue (2021-02-26) | 2 forks (2021-06-03 to 2022-03-25) |
import numpy as np
from sklearn.decomposition import PCA
def DAPCA(Xs, Xt, n_components=2):
return PCA(n_components=n_components).fit(np.concatenate([Xs, Xt], axis=0)).components_.T
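# Hedged usage sketch (added for illustration): project source- and
# target-domain features onto principal components fit on their union.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    Xs = rng.randn(100, 8)             # synthetic source-domain samples
    Xt = rng.randn(120, 8)             # synthetic target-domain samples
    W = DAPCA(Xs, Xt, n_components=2)  # (8, 2) shared projection matrix
    print((Xs @ W).shape, (Xt @ W).shape)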
| 4332a92ae7efd23ada1f62516ce25aa2afbf2df2 | 15,086 | py | Python | src/extension/tests/Test_EnvHealthManager.py | Azure/LinuxPatchExtension | 6af622afb4298805bdf47328d6bc66a785f7166b | ["Apache-2.0"] | 4 stars (2020-06-01 to 2021-08-24) | 34 issues (2020-09-11 to 2022-03-28) | 1 fork (2020-12-28) |
# Copyright 2020 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
import glob
import json
import os
import shutil
import tempfile
import time
import unittest
from datetime import datetime
from extension.src.Constants import Constants
from extension.src.EnvLayer import EnvLayer
from extension.src.EnvHealthManager import EnvHealthManager
from extension.src.RuntimeContextHandler import RuntimeContextHandler
from extension.src.file_handlers.CoreStateHandler import CoreStateHandler
from extension.src.EnableCommandHandler import EnableCommandHandler
from extension.src.file_handlers.ExtConfigSettingsHandler import ExtConfigSettingsHandler
from extension.src.file_handlers.ExtEnvHandler import ExtEnvHandler
from extension.src.file_handlers.ExtOutputStatusHandler import ExtOutputStatusHandler
from extension.src.file_handlers.ExtStateHandler import ExtStateHandler
from extension.src.ProcessHandler import ProcessHandler
from extension.tests.helpers.RuntimeComposer import RuntimeComposer
from extension.tests.helpers.VirtualTerminal import VirtualTerminal
class TestEnvManager(unittest.TestCase):
    def setUp(self):
        VirtualTerminal().print_lowlight("\n----------------- setup test runner -----------------")
        # create tempdir which will have all the required files
        self.temp_dir = tempfile.mkdtemp()
        self.env_layer = EnvLayer()
        self.env_health_manager = EnvHealthManager(self.env_layer)
        # Overriding time.sleep to avoid delays in test execution
        time.sleep = self.mock_sleep

    def tearDown(self):
        VirtualTerminal().print_lowlight("\n----------------- tear down test runner -----------------")
        # delete tempdir
        shutil.rmtree(self.temp_dir)

    def mock_sleep(self, seconds):
        pass

    def test_ensure_tty_not_required_when_not_preset_in_sudoers(self):
        mock_sudoers_file_path, mock_etc_sudoers_linux_patch_extension_file_path, backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path = self.__ensure_tty_not_required_test_setup()
        # when requiretty is not present in /etc/sudoers
        mock_sudoers_content = "test"
        self.write_to_file(mock_sudoers_file_path, mock_sudoers_content)
        self.env_layer.etc_sudoers_file_path = mock_sudoers_file_path
        self.env_layer.etc_sudoers_linux_patch_extension_file_path = mock_etc_sudoers_linux_patch_extension_file_path
        self.assertFalse(self.env_layer.is_tty_required_in_sudoers())
        self.assertFalse(self.env_layer.is_tty_disabled_in_linux_patch_extension_sudoers())
        self.assertFalse(self.env_layer.is_tty_required())
        self.env_health_manager.ensure_tty_not_required()
        self.assertFalse(os.path.exists(self.env_layer.etc_sudoers_linux_patch_extension_file_path))
        # wrap up
        self.__wrap_up_ensure_tty_not_required_test(backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path)

    def test_ensure_tty_not_required_when_set_to_required_for_all_in_sudoers(self):
        mock_sudoers_file_path, mock_etc_sudoers_linux_patch_extension_file_path, backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path = self.__ensure_tty_not_required_test_setup()
        # only Defaults requiretty present in /etc/sudoers
        mock_sudoers_content = "Defaults requiretty"
        self.write_to_file(mock_sudoers_file_path, mock_sudoers_content)
        self.env_layer.etc_sudoers_file_path = mock_sudoers_file_path
        self.env_layer.etc_sudoers_linux_patch_extension_file_path = mock_etc_sudoers_linux_patch_extension_file_path
        self.assertTrue(self.env_layer.is_tty_required_in_sudoers())
        self.assertFalse(self.env_layer.is_tty_disabled_in_linux_patch_extension_sudoers())
        self.assertTrue(self.env_layer.is_tty_required())
        self.env_health_manager.ensure_tty_not_required()
        etc_sudoers_linux_patch_extension_configuration = self.env_layer.file_system.read_with_retry(self.env_layer.etc_sudoers_linux_patch_extension_file_path)
        settings = etc_sudoers_linux_patch_extension_configuration.strip().split('\n')
        self.assertTrue("Defaults:" + self.env_layer.get_current_user() + " !requiretty" in settings)
        # wrap up
        self.__wrap_up_ensure_tty_not_required_test(backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path)

    def test_ensure_tty_not_required_when_set_to_required_for_currentuser_in_sudoers(self):
        mock_sudoers_file_path, mock_etc_sudoers_linux_patch_extension_file_path, backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path = self.__ensure_tty_not_required_test_setup()
        # only Defaults:currentuser requiretty present in /etc/sudoers
        mock_sudoers_content = "Defaults:" + self.env_layer.get_current_user() + " requiretty"
        self.write_to_file(mock_sudoers_file_path, mock_sudoers_content)
        self.env_layer.etc_sudoers_file_path = mock_sudoers_file_path
        self.env_layer.etc_sudoers_linux_patch_extension_file_path = mock_etc_sudoers_linux_patch_extension_file_path
        self.assertTrue(self.env_layer.is_tty_required_in_sudoers())
        self.assertFalse(self.env_layer.is_tty_disabled_in_linux_patch_extension_sudoers())
        self.assertTrue(self.env_layer.is_tty_required())
        self.env_health_manager.ensure_tty_not_required()
        etc_sudoers_linux_patch_extension_configuration = self.env_layer.file_system.read_with_retry(self.env_layer.etc_sudoers_linux_patch_extension_file_path)
        settings = etc_sudoers_linux_patch_extension_configuration.strip().split('\n')
        self.assertTrue("Defaults:" + self.env_layer.get_current_user() + " !requiretty" in settings)
        # wrap up
        self.__wrap_up_ensure_tty_not_required_test(backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path)

    def test_ensure_tty_not_required_when_set_to_not_required_for_all_and_currentuser(self):
        mock_sudoers_file_path, mock_etc_sudoers_linux_patch_extension_file_path, backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path = self.__ensure_tty_not_required_test_setup()
        # In /etc/sudoers: Defaults !requiretty and Defaults:currentuser !requiretty
        mock_sudoers_content = "Defaults:" + self.env_layer.get_current_user() + " !requiretty" + "\n" + "Defaults !requiretty"
        self.write_to_file(mock_sudoers_file_path, mock_sudoers_content)
        self.env_layer.etc_sudoers_file_path = mock_sudoers_file_path
        self.env_layer.etc_sudoers_linux_patch_extension_file_path = mock_etc_sudoers_linux_patch_extension_file_path
        self.assertFalse(self.env_layer.is_tty_required_in_sudoers())
        self.assertFalse(self.env_layer.is_tty_disabled_in_linux_patch_extension_sudoers())
        self.assertFalse(self.env_layer.is_tty_required())
        self.env_health_manager.ensure_tty_not_required()
        self.assertFalse(os.path.exists(self.env_layer.etc_sudoers_linux_patch_extension_file_path))
        # wrap up
        self.__wrap_up_ensure_tty_not_required_test(backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path)

    def test_ensure_tty_not_required_when_set_to_required_for_currentuser_and_not_required_for_all(self):
        mock_sudoers_file_path, mock_etc_sudoers_linux_patch_extension_file_path, backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path = self.__ensure_tty_not_required_test_setup()
        # In /etc/sudoers: Defaults:currentuser requiretty and Defaults !requiretty
        mock_sudoers_content = "Defaults:" + self.env_layer.get_current_user() + " requiretty" + "\n" + "Defaults !requiretty"
        self.write_to_file(mock_sudoers_file_path, mock_sudoers_content)
        self.env_layer.etc_sudoers_file_path = mock_sudoers_file_path
        self.env_layer.etc_sudoers_linux_patch_extension_file_path = mock_etc_sudoers_linux_patch_extension_file_path
        self.assertFalse(self.env_layer.is_tty_required_in_sudoers())
        self.assertFalse(self.env_layer.is_tty_disabled_in_linux_patch_extension_sudoers())
        self.assertFalse(self.env_layer.is_tty_required())
        self.env_health_manager.ensure_tty_not_required()
        self.assertFalse(os.path.exists(self.env_layer.etc_sudoers_linux_patch_extension_file_path))
        # wrap up
        self.__wrap_up_ensure_tty_not_required_test(backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path)

    def test_ensure_tty_not_required_when_set_to_not_required_for_all_and_required_for_currentuser(self):
        mock_sudoers_file_path, mock_etc_sudoers_linux_patch_extension_file_path, backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path = self.__ensure_tty_not_required_test_setup()
        # In /etc/sudoers: Defaults !requiretty and Defaults:currentuser requiretty
        mock_sudoers_content = "Defaults !requiretty" + "\n" + "Defaults:" + self.env_layer.get_current_user() + " requiretty"
        self.write_to_file(mock_sudoers_file_path, mock_sudoers_content)
        self.env_layer.etc_sudoers_file_path = mock_sudoers_file_path
        self.env_layer.etc_sudoers_linux_patch_extension_file_path = mock_etc_sudoers_linux_patch_extension_file_path
        self.assertTrue(self.env_layer.is_tty_required_in_sudoers())
        self.assertFalse(self.env_layer.is_tty_disabled_in_linux_patch_extension_sudoers())
        self.assertTrue(self.env_layer.is_tty_required())
        self.env_health_manager.ensure_tty_not_required()
        etc_sudoers_linux_patch_extension_configuration = self.env_layer.file_system.read_with_retry(self.env_layer.etc_sudoers_linux_patch_extension_file_path)
        settings = etc_sudoers_linux_patch_extension_configuration.strip().split('\n')
        self.assertTrue("Defaults:" + self.env_layer.get_current_user() + " !requiretty" in settings)
        # wrap up
        self.__wrap_up_ensure_tty_not_required_test(backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path)

    def test_ensure_tty_not_required_when_set_to_required_for_all_and_not_required_for_currentuser(self):
        mock_sudoers_file_path, mock_etc_sudoers_linux_patch_extension_file_path, backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path = self.__ensure_tty_not_required_test_setup()
        # In /etc/sudoers: Defaults requiretty and Defaults:currentuser !requiretty
        mock_sudoers_content = "Defaults requiretty" + "\n" + "Defaults:" + self.env_layer.get_current_user() + " !requiretty"
        self.write_to_file(mock_sudoers_file_path, mock_sudoers_content)
        self.env_layer.etc_sudoers_file_path = mock_sudoers_file_path
        self.env_layer.etc_sudoers_linux_patch_extension_file_path = mock_etc_sudoers_linux_patch_extension_file_path
        self.assertFalse(self.env_layer.is_tty_required_in_sudoers())
        self.assertFalse(self.env_layer.is_tty_disabled_in_linux_patch_extension_sudoers())
        self.assertFalse(self.env_layer.is_tty_required())
        self.env_health_manager.ensure_tty_not_required()
        self.assertFalse(os.path.exists(self.env_layer.etc_sudoers_linux_patch_extension_file_path))
        # wrap up
        self.__wrap_up_ensure_tty_not_required_test(backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path)

    def test_ensure_tty_not_required_when_tty_set_to_required_in_default_sudoers_and_tty_set_to_not_required_in_custom_sudoers_file_for_extension(self):
        mock_sudoers_file_path, mock_etc_sudoers_linux_patch_extension_file_path, backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path = self.__ensure_tty_not_required_test_setup()
        # Defaults set to required and /etc/sudoers.d/linuxpatchextension already set
        mock_sudoers_content = "Defaults requiretty" + "\n" + "Defaults:" + self.env_layer.get_current_user() + " requiretty"
        self.write_to_file(mock_sudoers_file_path, mock_sudoers_content)
        self.env_layer.etc_sudoers_file_path = mock_sudoers_file_path
        mock_etc_sudoers_linux_patch_extension_content = "Defaults:" + self.env_layer.get_current_user() + " !requiretty" + "\n"
        self.write_to_file(mock_etc_sudoers_linux_patch_extension_file_path, mock_etc_sudoers_linux_patch_extension_content)
        self.env_layer.etc_sudoers_linux_patch_extension_file_path = mock_etc_sudoers_linux_patch_extension_file_path
        self.assertTrue(self.env_layer.is_tty_required_in_sudoers())
        self.assertTrue(self.env_layer.is_tty_disabled_in_linux_patch_extension_sudoers())
        self.assertFalse(self.env_layer.is_tty_required())
        self.env_health_manager.ensure_tty_not_required()
        etc_sudoers_linux_patch_extension_configuration = self.env_layer.file_system.read_with_retry(self.env_layer.etc_sudoers_linux_patch_extension_file_path)
        settings = etc_sudoers_linux_patch_extension_configuration.strip().split('\n')
        self.assertTrue("Defaults:" + self.env_layer.get_current_user() + " !requiretty" in settings)
        # wrap up
        self.__wrap_up_ensure_tty_not_required_test(backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path)

    def __ensure_tty_not_required_test_setup(self):
        mock_sudoers_file_path = os.path.join(self.temp_dir, "etc-sudoers")
        backup_etc_sudoers_file_path = self.env_layer.etc_sudoers_file_path
        mock_etc_sudoers_linux_patch_extension_file_path = os.path.join(self.temp_dir, "etc-sudoers.d-linuxpatchextension")
        backup_etc_sudoers_linux_patch_extension_file_path = self.env_layer.etc_sudoers_linux_patch_extension_file_path
        return mock_sudoers_file_path, mock_etc_sudoers_linux_patch_extension_file_path, backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path

    def __wrap_up_ensure_tty_not_required_test(self, backup_etc_sudoers_file_path, backup_etc_sudoers_linux_patch_extension_file_path):
        self.env_layer.etc_sudoers_file_path = backup_etc_sudoers_file_path
        self.env_layer.etc_sudoers_linux_patch_extension_file_path = backup_etc_sudoers_linux_patch_extension_file_path

    @staticmethod
    def write_to_file(path, data):
        with open(path, "w+") as file_handle:
            file_handle.write(data)

if __name__ == '__main__':
    SUITE = unittest.TestLoader().loadTestsFromTestCase(TestEnvManager)
    unittest.TextTestRunner(verbosity=2).run(SUITE)
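The cases above fix the semantics EnvLayer is expected to implement: whichever requiretty / !requiretty directive (global or user-specific) appears last in /etc/sudoers wins. A simplified standalone sketch of that check (illustrative only; the real EnvLayer code may differ):

def is_tty_required_in_sudoers(sudoers_text, current_user):
    # Last matching directive wins, matching the expectations encoded
    # in the tests above.
    required = False
    for line in sudoers_text.splitlines():
        line = line.strip()
        if line in ("Defaults requiretty",
                    "Defaults:" + current_user + " requiretty"):
            required = True
        elif line in ("Defaults !requiretty",
                      "Defaults:" + current_user + " !requiretty"):
            required = False
    return required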
| 67.048889
| 208
| 0.808034
| 2,108
| 15,086
| 5.215844
| 0.08871
| 0.082947
| 0.129604
| 0.121874
| 0.815098
| 0.793633
| 0.790996
| 0.782083
| 0.780809
| 0.772533
| 0
| 0.000833
| 0.124155
| 15,086
| 225
| 209
| 67.048889
| 0.831303
| 0.08604
| 0
| 0.56129
| 0
| 0
| 0.03897
| 0.002399
| 0.03871
| 0
| 0
| 0
| 0.206452
| 1
| 0.090323
| false
| 0.006452
| 0.135484
| 0
| 0.23871
| 0.012903
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4334c06e304d34181374392c66e7f756605de103
| 972
|
py
|
Python
|
fastapi/{{ cookiecutter.project_name }}/tests/api/test_api_with_auth.py
|
Hannarong98/templates
|
bf0c77809cd71c1208b442ce89ee7c5dc692ce34
|
[
"MIT"
] | 22
|
2021-06-24T23:00:59.000Z
|
2022-03-17T15:06:55.000Z
|
fastapi/{{ cookiecutter.project_name }}/tests/api/test_api_with_auth.py
|
Hannarong98/templates
|
bf0c77809cd71c1208b442ce89ee7c5dc692ce34
|
[
"MIT"
] | 28
|
2021-06-23T14:52:06.000Z
|
2022-03-02T13:41:06.000Z
|
fastapi/{{ cookiecutter.project_name }}/tests/api/test_api_with_auth.py
|
Hannarong98/templates
|
bf0c77809cd71c1208b442ce89ee7c5dc692ce34
|
[
"MIT"
] | 2
|
2021-11-06T11:33:48.000Z
|
2022-02-23T13:40:14.000Z
|
import pytest
from httpx import AsyncClient
from tests.api.auth_utils import create_access_token


@pytest.mark.asyncio
async def test_auth_view_unauthorized(client: AsyncClient):
    # Renamed from test_auth_view: the original file defined two tests with
    # the same name, so pytest silently dropped this 401 check.
    response = await client.get('api/v1/hello')
    assert response.status_code == 401


@pytest.mark.asyncio
async def test_auth_view(client: AsyncClient):
    response = await client.get('api/v1/hello', headers={'Authorization': f'Bearer {create_access_token()}'})
    assert response.status_code == 200


@pytest.mark.asyncio
async def test_auth_view_not_admin(client: AsyncClient):
    response = await client.get('api/v1/hello-admin', headers={'Authorization': f'Bearer {create_access_token()}'})
    assert response.status_code == 401


@pytest.mark.asyncio
async def test_auth_view_admin(client: AsyncClient):
    response = await client.get(
        'api/v1/hello-admin', headers={'Authorization': f'Bearer {create_access_token(roles=["AdminUser"])}'}
    )
    assert response.status_code == 200
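These tests lean on two pieces that live outside this file: an httpx `client` fixture wired to the application, and the `create_access_token` helper imported above. A hedged sketch of what such a conftest could look like (the `app.main` module name is an assumption, not taken from the template):

# conftest.py (hypothetical; the template's actual fixture may differ)
import pytest
from httpx import AsyncClient

from app.main import app  # assumed application module


@pytest.fixture
async def client():
    # Drive the ASGI app in-process, no running server needed.
    async with AsyncClient(app=app, base_url="http://test") as ac:
        yield ac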
| 32.4
| 115
| 0.747942
| 131
| 972
| 5.366412
| 0.282443
| 0.068279
| 0.096728
| 0.125178
| 0.859175
| 0.816501
| 0.816501
| 0.816501
| 0.763869
| 0.763869
| 0
| 0.01889
| 0.128601
| 972
| 29
| 116
| 33.517241
| 0.811098
| 0
| 0
| 0.47619
| 0
| 0
| 0.213992
| 0.090535
| 0
| 0
| 0
| 0
| 0.190476
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
433ef033cc8254687a9d5ad30039c535aade1fce
| 51
|
py
|
Python
|
handy_features/decorators/__init__.py
|
vishwaefor/handy-python-features
|
cbc6f772655a1a329971cc2972d691501b2c66a1
|
[
"Apache-2.0"
] | 2
|
2019-07-05T18:07:36.000Z
|
2019-07-11T15:49:55.000Z
|
handy_features/decorators/__init__.py
|
vishwaefor/handy-python-features
|
cbc6f772655a1a329971cc2972d691501b2c66a1
|
[
"Apache-2.0"
] | 1
|
2019-07-03T08:18:21.000Z
|
2019-07-04T07:32:09.000Z
|
handy_features/decorators/__init__.py
|
vishwaefor/handy-python-features
|
cbc6f772655a1a329971cc2972d691501b2c66a1
|
[
"Apache-2.0"
] | null | null | null |
from .private_function_dec import private_function
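The package only re-exports `private_function`; its body is not shown in this record. A plausible sketch of such a decorator (an assumption, not the repository's actual code) rejects calls made from outside the defining module:

import inspect
from functools import wraps


def private_function(func):
    # Hypothetical implementation: inspect the caller's module and refuse
    # calls that do not originate where the function was defined.
    @wraps(func)
    def wrapper(*args, **kwargs):
        caller = inspect.stack()[1].frame.f_globals.get("__name__")
        if caller != func.__module__:
            raise RuntimeError("%s is private to %s"
                               % (func.__name__, func.__module__))
        return func(*args, **kwargs)
    return wrapper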
| 25.5
| 50
| 0.901961
| 7
| 51
| 6.142857
| 0.714286
| 0.697674
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 51
| 1
| 51
| 51
| 0.914894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4a28c933ff7862cdb014fcddfac0020197cc1b4e
| 104
|
py
|
Python
|
simuvex/simuvex/s_action_object.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 86
|
2015-08-06T23:25:07.000Z
|
2022-02-17T14:58:22.000Z
|
simuvex/simuvex/s_action_object.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 132
|
2015-09-10T19:06:59.000Z
|
2018-10-04T20:36:45.000Z
|
simuvex/simuvex/s_action_object.py
|
Ruide/angr-dev
|
964dc80c758e25c698c2cbcc454ef5954c5fa0a0
|
[
"BSD-2-Clause"
] | 80
|
2015-08-07T10:30:20.000Z
|
2020-03-21T14:45:28.000Z
|
print('... Importing simuvex/s_action_object.py ...')  # was a Python 2 print statement
from angr.state_plugins.sim_action_object import *
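Since the shim exists only to forward imports, a softer alternative to the bare print is a DeprecationWarning (a sketch of the pattern, not what the repository actually ships):

import warnings

warnings.warn(
    "simuvex.s_action_object has moved to angr.state_plugins.sim_action_object",
    DeprecationWarning, stacklevel=2)
from angr.state_plugins.sim_action_object import *  # noqa: F401,F403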
| 34.666667
| 52
| 0.788462
| 15
| 104
| 5.133333
| 0.866667
| 0.311688
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086538
| 104
| 2
| 53
| 52
| 0.810526
| 0
| 0
| 0
| 0
| 0
| 0.423077
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 1
| null | null | 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
4a2b3739bd4c92b8eade9586fcaa89c72014ab58
| 9,683
|
py
|
Python
|
flink-python/pyflink/shell.py
|
journeyqiao/flink
|
164202bd9b4662f246e961fd964b96ae308cbcee
|
[
"Apache-2.0"
] | 1
|
2020-03-07T15:49:39.000Z
|
2020-03-07T15:49:39.000Z
|
flink-python/pyflink/shell.py
|
journeyqiao/flink
|
164202bd9b4662f246e961fd964b96ae308cbcee
|
[
"Apache-2.0"
] | 5
|
2021-03-30T04:48:08.000Z
|
2021-12-24T08:22:11.000Z
|
flink-python/pyflink/shell.py
|
journeyqiao/flink
|
164202bd9b4662f246e961fd964b96ae308cbcee
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import atexit
import codecs
import os
import platform
import signal
import sys
from pyflink.common import *
from pyflink.dataset import *
from pyflink.datastream import *
from pyflink.table import *
from pyflink.table.catalog import *
from pyflink.table.descriptors import *
from pyflink.table.window import *
def _register_exit_handler():
    def clean(*args, **kwargs):
        try:
            if "PYFLINK_INTERNAL_LIB" in os.environ:
                files = os.environ["PYFLINK_INTERNAL_LIB"].split(os.pathsep)
                for file in files:
                    if os.path.exists(file):
                        os.remove(file)
        finally:
            sys.exit()

    atexit.register(clean)
    # we already ignore the SIGINT so only process the SIGTERM
    signal.signal(signal.SIGTERM, clean)

_register_exit_handler()

utf8_out = open(sys.stdout.fileno(), mode='w', encoding='utf8', buffering=1)
print("Using Python version %s (%s, %s)" % (
    platform.python_version(),
    platform.python_build()[0],
    platform.python_build()[1]))
welcome_msg = u'''
\u2592\u2593\u2588\u2588\u2593\u2588\u2588\u2592
\u2593\u2588\u2588\u2588\u2588\u2592\u2592\u2588\u2593\u2592\u2593\u2588\u2588\u2588\u2593\u2592
\u2593\u2588\u2588\u2588\u2593\u2591\u2591 \u2592\u2592\u2592\u2593\u2588\u2588\u2592 \u2592
\u2591\u2588\u2588\u2592 \u2592\u2592\u2593\u2593\u2588\u2593\u2593\u2592\u2591 \u2592\u2588\u2588\u2588\u2588
\u2588\u2588\u2592 \u2591\u2592\u2593\u2588\u2588\u2588\u2592 \u2592\u2588\u2592\u2588\u2592
\u2591\u2593\u2588 \u2588\u2588\u2588 \u2593\u2591\u2592\u2588\u2588
\u2593\u2588 \u2592\u2592\u2592\u2592\u2592\u2593\u2588\u2588\u2593\u2591\u2592\u2591\u2593\u2593\u2588
\u2588\u2591 \u2588 \u2592\u2592\u2591 \u2588\u2588\u2588\u2593\u2593\u2588 \u2592\u2588\u2592\u2592\u2592
\u2588\u2588\u2588\u2588\u2591 \u2592\u2593\u2588\u2593 \u2588\u2588\u2592\u2592\u2592 \u2593\u2588\u2588\u2588\u2592
\u2591\u2592\u2588\u2593\u2593\u2588\u2588 \u2593\u2588\u2592 \u2593\u2588\u2592\u2593\u2588\u2588\u2593 \u2591\u2588\u2591
\u2593\u2591\u2592\u2593\u2588\u2588\u2588\u2588\u2592 \u2588\u2588 \u2592\u2588 \u2588\u2593\u2591\u2592\u2588\u2592\u2591\u2592\u2588\u2592
\u2588\u2588\u2588\u2593\u2591\u2588\u2588\u2593 \u2593\u2588 \u2588 \u2588\u2593 \u2592\u2593\u2588\u2593\u2593\u2588\u2592
\u2591\u2588\u2588\u2593 \u2591\u2588\u2591 \u2588 \u2588\u2592 \u2592\u2588\u2588\u2588\u2588\u2588\u2593\u2592 \u2588\u2588\u2593\u2591\u2592
\u2588\u2588\u2588\u2591 \u2591 \u2588\u2591 \u2593 \u2591\u2588 \u2588\u2588\u2588\u2588\u2588\u2592\u2591\u2591 \u2591\u2588\u2591\u2593 \u2593\u2591
\u2588\u2588\u2593\u2588 \u2592\u2592\u2593\u2592 \u2593\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2593\u2591 \u2592\u2588\u2592 \u2592\u2593 \u2593\u2588\u2588\u2593
\u2592\u2588\u2588\u2593 \u2593\u2588 \u2588\u2593\u2588 \u2591\u2592\u2588\u2588\u2588\u2588\u2588\u2593\u2593\u2592\u2591 \u2588\u2588\u2592\u2592 \u2588 \u2592 \u2593\u2588\u2592
\u2593\u2588\u2593 \u2593\u2588 \u2588\u2588\u2593 \u2591\u2593\u2593\u2593\u2593\u2593\u2593\u2593\u2592 \u2592\u2588\u2588\u2593 \u2591\u2588\u2592
\u2593\u2588 \u2588 \u2593\u2588\u2588\u2588\u2593\u2592\u2591 \u2591\u2593\u2593\u2593\u2588\u2588\u2588\u2593 \u2591\u2592\u2591 \u2593\u2588
\u2588\u2588\u2593 \u2588\u2588\u2592 \u2591\u2592\u2593\u2593\u2588\u2588\u2588\u2593\u2593\u2593\u2593\u2593\u2588\u2588\u2588\u2588\u2588\u2588\u2593\u2592 \u2593\u2588\u2588\u2588 \u2588
\u2593\u2588\u2588\u2588\u2592 \u2588\u2588\u2588 \u2591\u2593\u2593\u2592\u2591\u2591 \u2591\u2593\u2588\u2588\u2588\u2588\u2593\u2591 \u2591\u2592\u2593\u2592 \u2588\u2593
\u2588\u2593\u2592\u2592\u2593\u2593\u2588\u2588 \u2591\u2592\u2592\u2591\u2591\u2591\u2592\u2592\u2592\u2592\u2593\u2588\u2588\u2593\u2591 \u2588\u2593
\u2588\u2588 \u2593\u2591\u2592\u2588 \u2593\u2593\u2593\u2593\u2592\u2591\u2591 \u2592\u2588\u2593 \u2592\u2593\u2593\u2588\u2588\u2593 \u2593\u2592 \u2592\u2592\u2593
\u2593\u2588\u2593 \u2593\u2592\u2588 \u2588\u2593\u2591 \u2591\u2592\u2593\u2593\u2588\u2588\u2592 \u2591\u2593\u2588\u2592 \u2592\u2592\u2592\u2591\u2592\u2592\u2593\u2588\u2588\u2588\u2588\u2588\u2592
\u2588\u2588\u2591 \u2593\u2588\u2592\u2588\u2592 \u2592\u2593\u2593\u2592 \u2593\u2588 \u2588\u2591 \u2591\u2591\u2591\u2591 \u2591\u2588\u2592
\u2593\u2588 \u2592\u2588\u2593 \u2591 \u2588\u2591 \u2592\u2588 \u2588\u2593
\u2588\u2593 \u2588\u2588 \u2588\u2591 \u2593\u2593 \u2592\u2588\u2593\u2593\u2593\u2592\u2588\u2591
\u2588\u2593 \u2591\u2593\u2588\u2588\u2591 \u2593\u2592 \u2593\u2588\u2593\u2592\u2591\u2591\u2591\u2592\u2593\u2588\u2591 \u2592\u2588
\u2588\u2588 \u2593\u2588\u2593\u2591 \u2592 \u2591\u2592\u2588\u2592\u2588\u2588\u2592 \u2593\u2593
\u2593\u2588\u2592 \u2592\u2588\u2593\u2592\u2591 \u2592\u2592 \u2588\u2592\u2588\u2593\u2592\u2592\u2591\u2591\u2592\u2588\u2588
\u2591\u2588\u2588\u2592 \u2592\u2593\u2593\u2592 \u2593\u2588\u2588\u2593\u2592\u2588\u2592 \u2591\u2593\u2593\u2593\u2593\u2592\u2588\u2593
\u2591\u2593\u2588\u2588\u2592 \u2593\u2591 \u2592\u2588\u2593\u2588 \u2591\u2591\u2592\u2592\u2592
\u2592\u2593\u2593\u2593\u2593\u2593\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2592\u2591\u2591\u2593\u2593 \u2593\u2591\u2592\u2588\u2591
F L I N K - P Y T H O N - S H E L L
NOTE: Use the prebound Table Environment to implement batch or streaming Table programs.
Batch - Use 'b_env' and 'bt_env' variables
*
* import tempfile
* import os
* import shutil
* sink_path = tempfile.gettempdir() + '/batch.csv'
* if os.path.exists(sink_path):
* if os.path.isfile(sink_path):
* os.remove(sink_path)
* else:
* shutil.rmtree(sink_path)
* b_env.set_parallelism(1)
* t = bt_env.from_elements([(1, 'hi', 'hello'), (2, 'hi', 'hello')], ['a', 'b', 'c'])
* bt_env.connect(FileSystem().path(sink_path)) \\
* .with_format(OldCsv()
* .field_delimiter(',')
* .field("a", DataTypes.BIGINT())
* .field("b", DataTypes.STRING())
* .field("c", DataTypes.STRING())) \\
* .with_schema(Schema()
* .field("a", DataTypes.BIGINT())
* .field("b", DataTypes.STRING())
* .field("c", DataTypes.STRING())) \\
* .create_temporary_table("batch_sink")
*
* t.select("a + 1, b, c").insert_into("batch_sink")
*
* bt_env.execute("batch_job")
Streaming - Use 's_env' and 'st_env' variables
*
* import tempfile
* import os
* import shutil
* sink_path = tempfile.gettempdir() + '/streaming.csv'
* if os.path.exists(sink_path):
* if os.path.isfile(sink_path):
* os.remove(sink_path)
* else:
* shutil.rmtree(sink_path)
* s_env.set_parallelism(1)
* t = st_env.from_elements([(1, 'hi', 'hello'), (2, 'hi', 'hello')], ['a', 'b', 'c'])
* st_env.connect(FileSystem().path(sink_path)) \\
* .with_format(OldCsv()
* .field_delimiter(',')
* .field("a", DataTypes.BIGINT())
* .field("b", DataTypes.STRING())
* .field("c", DataTypes.STRING())) \\
* .with_schema(Schema()
* .field("a", DataTypes.BIGINT())
* .field("b", DataTypes.STRING())
* .field("c", DataTypes.STRING())) \\
* .create_temporary_table("stream_sink")
*
* t.select("a + 1, b, c").insert_into("stream_sink")
*
* st_env.execute("stream_job")
'''
utf8_out.write(welcome_msg)
b_env = ExecutionEnvironment.get_execution_environment()
bt_env = BatchTableEnvironment.create(b_env)
s_env = StreamExecutionEnvironment.get_execution_environment()
st_env = StreamTableEnvironment.create(s_env)
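The `_register_exit_handler` above pairs `atexit` with a SIGTERM hook so the temporary libraries are removed on both normal shutdown and external termination. The same pattern in isolation, with a hypothetical scratch file standing in for the PYFLINK_INTERNAL_LIB entries:

import atexit
import os
import signal
import sys
import tempfile

scratch = tempfile.NamedTemporaryFile(delete=False).name  # stand-in artifact


def _cleanup(*args, **kwargs):
    # Remove the scratch file, then let the interpreter exit.
    try:
        if os.path.exists(scratch):
            os.remove(scratch)
    finally:
        sys.exit()


atexit.register(_cleanup)                # normal interpreter shutdown
signal.signal(signal.SIGTERM, _cleanup)  # external termination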
| 58.684848
| 242
| 0.633275
| 1,237
| 9,683
| 4.898141
| 0.16249
| 0.204654
| 0.136161
| 0.082522
| 0.694339
| 0.593992
| 0.444628
| 0.269681
| 0.224294
| 0.156627
| 0
| 0.36193
| 0.218734
| 9,683
| 164
| 243
| 59.042683
| 0.438995
| 0.08706
| 0
| 0.265625
| 0
| 0.257813
| 0.861018
| 0.438993
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015625
| false
| 0
| 0.148438
| 0
| 0.164063
| 0.007813
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4a38a518066c13935febff437af0522d985ef3d4
| 1,683
|
py
|
Python
|
Complete/Tests/test_formy_submit_success.py
|
tim-corley/Selenium-Starter-Kit
|
e74beae52c97464f40c034996c0645fe3f8cc235
|
[
"Unlicense",
"MIT"
] | null | null | null |
Complete/Tests/test_formy_submit_success.py
|
tim-corley/Selenium-Starter-Kit
|
e74beae52c97464f40c034996c0645fe3f8cc235
|
[
"Unlicense",
"MIT"
] | 1
|
2021-06-02T00:54:01.000Z
|
2021-06-02T00:54:01.000Z
|
Complete/Tests/test_formy_submit_success.py
|
tim-corley/Selenium-Starter-Kit
|
e74beae52c97464f40c034996c0645fe3f8cc235
|
[
"Unlicense",
"MIT"
] | null | null | null |
# must be within Tests folder to run
# Complete/Tests $ pytest -v
import sys
sys.path.append('..')
from Pages.page_form import FormPage
from globals import FormyGlobals
from selenium import webdriver
import pytest
class TestFormSuccessChrome():
    @pytest.fixture()
    def test_setup(self):
        global driver
        path = str(FormyGlobals.chrome_driver_path)
        driver = webdriver.Chrome(executable_path=path)
        driver.implicitly_wait(10)
        driver.maximize_window()
        driver.get(FormyGlobals.form_url)
        yield
        driver.close()
        driver.quit()
        print('\nTest Completed\n')

    def test_complete_form_success(self, test_setup):
        form = FormPage(driver)
        form.complete_form(FormyGlobals.first_name, FormyGlobals.last_name, FormyGlobals.job_title, FormyGlobals.date_string)
        form.submit_form()
        assert 'The form was successfully submitted!' == form.success_result()


class TestFormSuccessFirefox():
    @pytest.fixture()
    def test_setup(self):
        global driver
        path = str(FormyGlobals.gecko_driver_path)
        driver = webdriver.Firefox(executable_path=path)
        driver.implicitly_wait(10)
        driver.maximize_window()
        driver.get(FormyGlobals.form_url)
        yield
        driver.close()
        driver.quit()
        print('\nTest Completed\n')

    def test_complete_form_success(self, test_setup):
        form = FormPage(driver)
        form.complete_form(FormyGlobals.first_name, FormyGlobals.last_name, FormyGlobals.job_title, FormyGlobals.date_string)
        form.submit_form()
        assert 'The form was successfully submitted!' == form.success_result()
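The Chrome and Firefox classes are verbatim copies apart from the driver; a parametrized fixture collapses them into one test (a sketch assuming the same FormyGlobals/FormPage helpers):

import pytest
from selenium import webdriver
from Pages.page_form import FormPage
from globals import FormyGlobals


@pytest.fixture(params=["chrome", "firefox"])
def browser(request):
    # One fixture covering both drivers instead of two copied classes.
    if request.param == "chrome":
        driver = webdriver.Chrome(executable_path=str(FormyGlobals.chrome_driver_path))
    else:
        driver = webdriver.Firefox(executable_path=str(FormyGlobals.gecko_driver_path))
    driver.implicitly_wait(10)
    driver.maximize_window()
    driver.get(FormyGlobals.form_url)
    yield driver
    driver.quit()


def test_complete_form_success(browser):
    form = FormPage(browser)
    form.complete_form(FormyGlobals.first_name, FormyGlobals.last_name,
                       FormyGlobals.job_title, FormyGlobals.date_string)
    form.submit_form()
    assert form.success_result() == 'The form was successfully submitted!'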
| 34.346939
| 125
| 0.694593
| 194
| 1,683
| 5.835052
| 0.345361
| 0.024735
| 0.028269
| 0.035336
| 0.738516
| 0.738516
| 0.738516
| 0.738516
| 0.738516
| 0.738516
| 0
| 0.003028
| 0.215092
| 1,683
| 48
| 126
| 35.0625
| 0.853899
| 0.036245
| 0
| 0.714286
| 0
| 0
| 0.067943
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 1
| 0.095238
| false
| 0
| 0.119048
| 0
| 0.261905
| 0.047619
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4a7c85e64191816a7e4f343d7e0cd40d781fc50f
| 8,689
|
py
|
Python
|
src/vfb_query_builder/test/TermInfo_schema_tests.py
|
VirtualFlyBrain/VFB_json_schema
|
998f8cf4d580fd74b9bfb38c958123233a8d8d1a
|
[
"Apache-2.0"
] | null | null | null |
src/vfb_query_builder/test/TermInfo_schema_tests.py
|
VirtualFlyBrain/VFB_json_schema
|
998f8cf4d580fd74b9bfb38c958123233a8d8d1a
|
[
"Apache-2.0"
] | 76
|
2018-11-09T12:03:00.000Z
|
2021-12-14T18:06:11.000Z
|
src/vfb_query_builder/test/TermInfo_schema_tests.py
|
VirtualFlyBrain/VFB_json_schema
|
998f8cf4d580fd74b9bfb38c958123233a8d8d1a
|
[
"Apache-2.0"
] | null | null | null |
import unittest
from vfb_query_builder.query_roller import QueryLibrary, query_builder
from .test_tools import TestWrapper
class TermInfoRollerTest(unittest.TestCase):
    def setUp(self):
        self.ql = QueryLibrary()
        self.qw = TestWrapper('vfb_termInfo.json')
        print("Running", self.id().split('.')[1:])

    def test_class_term(self):
        query = query_builder(query_labels=["Class"],
                              clauses=[self.ql.term()],
                              query_short_forms=['FBbt_00000591'])
        r = self.qw.test(t=self,
                         query=query)

    def test_individual_term(self):
        query = query_builder(query_labels=["Individual"],
                              clauses=[self.ql.term()],
                              query_short_forms=['VFB_00011179'])
        r = self.qw.test(t=self,
                         query=query)

    def test_class_images(self):
        query = query_builder(query_labels=["Class"],
                              clauses=[self.ql.term(),
                                       self.ql.anatomy_channel_image()],
                              query_short_forms=['FBbt_00007422'])
        r = self.qw.test(t=self,
                         query=query)

    def test_class_xrefs(self):
        query = query_builder(query_labels=["Class"],
                              clauses=[self.ql.term(),
                                       self.ql.xrefs()],
                              query_short_forms=['VFBexp_FBtp0123937FBtp0120068'])
        r = self.qw.test(t=self,
                         query=query)

    def test_class_parents(self):
        query = query_builder(query_labels=["Class"],
                              clauses=[self.ql.term(),
                                       self.ql.parents()],
                              query_short_forms=['FBbt_00007422'])
        r = self.qw.test(t=self,
                         query=query)

    def test_class_relationships(self):
        query = query_builder(query_labels=["Class"],
                              clauses=[self.ql.term(),
                                       self.ql.relationships()],
                              query_short_forms=['FBbt_00007422'])
        r = self.qw.test(t=self,
                         query=query)

    def test_class_def_pubs(self):
        query = query_builder(query_labels=["Class"],
                              clauses=[self.ql.term(),
                                       self.ql.def_pubs()],
                              query_short_forms=['FBbt_00000591'])
        r = self.qw.test(t=self,
                         query=query)

    def test_class_pub_syn(self):
        query = query_builder(query_labels=["Class"],
                              clauses=[self.ql.term(),
                                       self.ql.pub_syn()],
                              query_short_forms=['FBbt_00000591'])
        r = self.qw.test(t=self,
                         query=query)

    def test_individual_relationships(self):
        query = query_builder(query_labels=["Individual"],
                              clauses=[self.ql.term(),
                                       self.ql.relationships()],
                              query_short_forms=['VFB_00011179'])
        r = self.qw.test(t=self,
                         query=query)

    def test_individual_parents(self):
        query = query_builder(query_labels=["Individual"],
                              clauses=[self.ql.term(),
                                       self.ql.parents()],
                              query_short_forms=['VFB_00011179'])
        r = self.qw.test(t=self,
                         query=query)

    def test_individual_xrefs(self):
        query = query_builder(query_labels=["Individual"],
                              clauses=[self.ql.term(),
                                       self.ql.xrefs()],
                              query_short_forms=['VFB_00010249'])
        r = self.qw.test(t=self,
                         query=query)

    def test_individual_image(self):
        query = query_builder(query_labels=["Individual"],
                              clauses=[self.ql.term(),
                                       self.ql.channel_image()],
                              query_short_forms=['VFB_00011179'])
        r = self.qw.test(t=self,
                         query=query)

    def test_individual_dataset_license(self):
        query = query_builder(query_labels=["Individual"],
                              clauses=[self.ql.term(),
                                       self.ql.dataSet_license()],
                              query_short_forms=['VFB_00011179'])
        r = self.qw.test(t=self,
                         query=query)

    def test_class(self):
        query = self.ql.class_term_info(short_form=['FBbt_00047035'])
        r = self.qw.test(t=self,
                         query=query)

    def test_individual(self):
        query = self.ql.anatomical_ind_term_info(short_form=['VFB_jrchjtdq'])
        r = self.qw.test(t=self,
                         query=query)

    def test_dataset_license(self):
        query = query_builder(query_labels=['DataSet'],
                              query_short_forms=['Ito2013'],
                              clauses=[self.ql.term(),
                                       self.ql.license()])
        r = self.qw.test(t=self,
                         query=query)

    def test_dataset(self):
        query = self.ql.dataset_term_info(short_form=['Ito2013'])
        r = self.qw.test(t=self,
                         query=query)

    def test_license(self):
        query = self.ql.license_term_info(short_form=['VFBlicense_CC_BY_SA_4_0'])
        r = self.qw.test(t=self,
                         query=query)

    def test_dataset_xrefs(self):
        query = query_builder(query_labels=['DataSet'],
                              query_short_forms=['Ito2013'],
                              clauses=[self.ql.term(),
                                       self.ql.xrefs()])
        r = self.qw.test(t=self,
                         query=query)

    def test_dataset_pub(self):
        query = query_builder(query_labels=['DataSet'],
                              query_short_forms=['Ito2013'],
                              clauses=[self.ql.term(),
                                       self.ql.pubs()])
        r = self.qw.test(t=self,
                         query=query)

    def test_dataset_anatomy_channel_image(self):
        query = query_builder(query_labels=['DataSet'],
                              query_short_forms=['Ito2013'],
                              clauses=[self.ql.term(),
                                       self.ql.anatomy_channel_image()])
        r = self.qw.test(t=self,
                         query=query)

    def test_template(self):
        query = self.ql.template_term_info(short_form=["VFB_00017894"], pretty_print=True)
        r = self.qw.test(t=self,
                         query=query)

    def test_template_domains(self):
        query = query_builder(query_labels=['Template'],
                              query_short_forms=['VFB_00017894'],
                              clauses=[self.ql.term(),
                                       self.ql.template_domain()])
        r = self.qw.test(t=self,
                         query=query)

    def test_template_channel(self):
        query = query_builder(query_labels=['Template'],
                              query_short_forms=['VFB_00017894'],
                              clauses=[self.ql.term(),
                                       self.ql.template_channel()])
        r = self.qw.test(t=self,
                         query=query)

    def test_neuron_class(self):
        query = self.ql.neuron_class_term_info(short_form=["FBbt_00047609"], pretty_print=True)
        r = self.qw.test(t=self,
                         query=query)

    def test_neuron_class_null_split(self):
        # Splits unlikely to be directly annotated with neuron only
        query = self.ql.neuron_class_term_info(short_form=["FBbt_00005106"], pretty_print=True)
        r = self.qw.test(t=self,
                         query=query)

    def test_split_class(self):
        query = self.ql.split_class_term_info(short_form=["VFBexp_FBtp0123136FBtp0119953"], pretty_print=True)
        r = self.qw.test(t=self,
                         query=query)

    def test_pub(self):
        query = self.ql.pub_term_info(short_form=['FBrf0221438'])
        r = self.qw.test(t=self,
                         query=query)

    def tearDown(self):
        return

if __name__ == '__main__':
    unittest.main(verbosity=2)
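Most of the tests above repeat one build-and-run pattern; a helper on TermInfoRollerTest could centralize it (a sketch against the same query_builder/TestWrapper API, not code from the repository):

def _run_term_query(self, labels, short_forms, extra_clauses=()):
    # Build the standard term query and hand it to the wrapper.
    query = query_builder(query_labels=labels,
                          query_short_forms=short_forms,
                          clauses=[self.ql.term(), *extra_clauses])
    return self.qw.test(t=self, query=query)

# e.g. test_class_images would reduce to:
#     self._run_term_query(["Class"], ['FBbt_00007422'],
#                          extra_clauses=[self.ql.anatomy_channel_image()])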
| 40.602804
| 110
| 0.486477
| 874
| 8,689
| 4.586957
| 0.106407
| 0.123472
| 0.164131
| 0.076827
| 0.805188
| 0.777002
| 0.770516
| 0.75106
| 0.745822
| 0.72911
| 0
| 0.039136
| 0.403038
| 8,689
| 213
| 111
| 40.793427
| 0.733757
| 0.00656
| 0
| 0.653631
| 0
| 0
| 0.062109
| 0.009386
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167598
| false
| 0
| 0.01676
| 0.005587
| 0.195531
| 0.027933
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4a9ebffbe7102fca646b8220663a12cf4794c21c
| 8,932
|
py
|
Python
|
tests/paths_test.py
|
rhofour/InfiniTDBackend
|
8763d64a82d02e4282abff5419e1ab256af41d7e
|
[
"MIT"
] | null | null | null |
tests/paths_test.py
|
rhofour/InfiniTDBackend
|
8763d64a82d02e4282abff5419e1ab256af41d7e
|
[
"MIT"
] | null | null | null |
tests/paths_test.py
|
rhofour/InfiniTDBackend
|
8763d64a82d02e4282abff5419e1ab256af41d7e
|
[
"MIT"
] | null | null | null |
import unittest
from random import Random
import numpy as np
from infinitd_server.game_config import CellPos, Row, Col
from infinitd_server.battleground_state import BattlegroundState, BgTowersState, BgTowerState
from infinitd_server.paths import makePathMap, compressPath, PathMap, pathExists
def emptyBattleground(rows: int, cols: int):
    return BattlegroundState(towers = BgTowersState([[None for c in range(cols)] for r in range(rows)]))


class TestGetRandomPath(unittest.TestCase):
    def test_diagonal2(self):
        battleground = emptyBattleground(2, 2)
        start = CellPos(Row(0), Col(0))
        end = CellPos(Row(1), Col(1))
        pathMap = makePathMap(battleground, start, end)
        self.assertIsNotNone(pathMap)
        for i in range(10):
            with self.subTest(seed=i):
                path = pathMap.getRandomPath(start, Random(i))  # pytype: disable=attribute-error
                self.assertEqual(len(path), 3)
                self.assertEqual(path[0], start)
                self.assertEqual(path[-1], end)
                # Make sure each position is adjacent to the previous
                prevElem = path[0]
                for elem in path[1:]:
                    self.assertGreaterEqual(elem.row, prevElem.row - 1)
                    self.assertLessEqual(elem.row, prevElem.row + 1)
                    self.assertGreaterEqual(elem.col, prevElem.col - 1)
                    self.assertLessEqual(elem.col, prevElem.col + 1)
                    prevElem = elem

    def test_diagonal5(self):
        battleground = emptyBattleground(5, 5)
        start = CellPos(Row(0), Col(0))
        end = CellPos(Row(4), Col(4))
        pathMap = makePathMap(battleground, start, end)
        self.assertIsNotNone(pathMap)
        for i in range(10):
            with self.subTest(seed=i):
                path = pathMap.getRandomPath(start, Random(i))  # pytype: disable=attribute-error
                self.assertEqual(len(path), 9)
                self.assertEqual(path[0], start)
                self.assertEqual(path[-1], end)
                # Make sure each position is adjacent to the previous
                prevElem = path[0]
                for elem in path[1:]:
                    self.assertGreaterEqual(elem.row, prevElem.row - 1)
                    self.assertLessEqual(elem.row, prevElem.row + 1)
                    self.assertGreaterEqual(elem.col, prevElem.col - 1)
                    self.assertLessEqual(elem.col, prevElem.col + 1)
                    prevElem = elem

    def test_diagonal5_with_obstacles(self):
        battleground = emptyBattleground(5, 5)
        battleground.towers.towers[2][2] = BgTowerState(0)
        battleground.towers.towers[2][3] = BgTowerState(0)
        battleground.towers.towers[3][2] = BgTowerState(0)
        battleground.towers.towers[3][3] = BgTowerState(0)
        start = CellPos(Row(0), Col(0))
        end = CellPos(Row(4), Col(4))
        pathMap = makePathMap(battleground, start, end)
        self.assertIsNotNone(pathMap)
        for i in range(10):
            with self.subTest(seed=i):
                path = pathMap.getRandomPath(start, Random(i))  # pytype: disable=attribute-error
                self.assertEqual(len(path), 9)
                self.assertEqual(path[0], start)
                self.assertEqual(path[-1], end)
                # Make sure each position is adjacent to the previous
                prevElem = path[0]
                for elem in path[1:]:
                    self.assertGreaterEqual(elem.row, prevElem.row - 1)
                    self.assertLessEqual(elem.row, prevElem.row + 1)
                    self.assertGreaterEqual(elem.col, prevElem.col - 1)
                    self.assertLessEqual(elem.col, prevElem.col + 1)
                    prevElem = elem


class TestPathExists(unittest.TestCase):
    def test_startBlocked(self):
        battleground = emptyBattleground(2, 2)
        battleground.towers.towers[0][0] = BgTowerState(0)
        self.assertFalse(pathExists(battleground, CellPos(Row(0), Col(0)), CellPos(Row(1), Col(1))))

    def test_endBlocked(self):
        battleground = emptyBattleground(2, 2)
        battleground.towers.towers[1][1] = BgTowerState(0)
        self.assertFalse(pathExists(battleground, CellPos(Row(0), Col(0)), CellPos(Row(1), Col(1))))

    def test_noPath(self):
        battleground = emptyBattleground(2, 2)
        battleground.towers.towers[0][1] = BgTowerState(0)
        battleground.towers.towers[1][0] = BgTowerState(0)
        self.assertFalse(pathExists(battleground, CellPos(Row(0), Col(0)), CellPos(Row(1), Col(1))))

    def test_oneStepPath(self):
        battleground = emptyBattleground(2, 2)
        self.assertTrue(pathExists(battleground, CellPos(Row(0), Col(0)), CellPos(Row(1), Col(1))))

    def test_multiStepPath(self):
        battleground = emptyBattleground(2, 3)
        battleground.towers.towers[0][1] = BgTowerState(0)
        self.assertTrue(pathExists(battleground, CellPos(Row(0), Col(0)), CellPos(Row(0), Col(2))))

    def test_multiplePaths(self):
        battleground = emptyBattleground(3, 3)
        battleground.towers.towers[1][1] = BgTowerState(0)
        self.assertTrue(pathExists(battleground, CellPos(Row(0), Col(0)), CellPos(Row(2), Col(2))))

    def test_manyPaths(self):
        battleground = emptyBattleground(3, 3)
        self.assertTrue(pathExists(battleground, CellPos(Row(0), Col(0)), CellPos(Row(2), Col(2))))


class TestMakePathMap(unittest.TestCase):
    def test_startBlocked(self):
        battleground = emptyBattleground(2, 2)
        battleground.towers.towers[0][0] = BgTowerState(0)
        pathMap = makePathMap(battleground, CellPos(Row(0), Col(0)), CellPos(Row(1), Col(1)))
        self.assertIsNone(pathMap)

    def test_endBlocked(self):
        battleground = emptyBattleground(2, 2)
        battleground.towers.towers[1][1] = BgTowerState(0)
        pathMap = makePathMap(battleground, CellPos(Row(0), Col(0)), CellPos(Row(1), Col(1)))
        self.assertIsNone(pathMap)

    def test_noPath(self):
        battleground = emptyBattleground(2, 2)
        battleground.towers.towers[0][1] = BgTowerState(0)
        battleground.towers.towers[1][0] = BgTowerState(0)
        pathMap = makePathMap(battleground, CellPos(Row(0), Col(0)), CellPos(Row(1), Col(1)))
        self.assertIsNone(pathMap)

    def test_oneStepPath(self):
        battleground = emptyBattleground(2, 2)
        pathMap = makePathMap(battleground, CellPos(Row(0), Col(0)), CellPos(Row(0), Col(1)))
        np.testing.assert_array_equal(
            pathMap.dists, np.asarray([[0, 1], [-1, -1]]))

    def test_multiStepPath(self):
        battleground = emptyBattleground(2, 3)
        battleground.towers.towers[0][1] = BgTowerState(0)
        pathMap = makePathMap(battleground, CellPos(Row(0), Col(0)), CellPos(Row(0), Col(2)))
        np.testing.assert_array_equal(
            pathMap.dists, np.asarray([[0, -1, 4], [1, 2, 3]]))

    def test_multiplePaths(self):
        battleground = emptyBattleground(3, 3)
        battleground.towers.towers[1][1] = BgTowerState(0)
        pathMap = makePathMap(battleground, CellPos(Row(0), Col(0)), CellPos(Row(2), Col(2)))
        np.testing.assert_array_equal(pathMap.dists,
                                      np.asarray([[0, 1, 2], [1, -1, 3], [2, 3, 4]]))

    def test_manyPaths(self):
        battleground = emptyBattleground(3, 3)
        pathMap = makePathMap(battleground, CellPos(Row(0), Col(0)), CellPos(Row(2), Col(2)))
        np.testing.assert_array_equal(
            pathMap.dists, np.asarray([[0, 1, 2], [1, 2, 3], [2, 3, 4]]))


class TestCompressPath(unittest.TestCase):
    def test_twoNodePaths(self):
        path1 = [CellPos(Row(0), Col(0)), CellPos(Row(0), Col(1))]
        path2 = [CellPos(Row(0), Col(0)), CellPos(Row(1), Col(0))]
        newPath1 = compressPath(path1)
        newPath2 = compressPath(path2)
        self.assertListEqual(newPath1, path1)
        self.assertListEqual(newPath2, path2)

    def test_singleChainPath(self):
        path1 = [CellPos(Row(0), Col(0)), CellPos(Row(0), Col(1)), CellPos(Row(0), Col(2))]
        path2 = [CellPos(Row(0), Col(0)), CellPos(Row(1), Col(0)), CellPos(Row(2), Col(0)),
                 CellPos(Row(3), Col(0))]
        newPath1 = compressPath(path1)
        newPath2 = compressPath(path2)
        self.assertListEqual(newPath1, [CellPos(Row(0), Col(0)), CellPos(Row(0), Col(2))])
        self.assertListEqual(newPath2, [CellPos(Row(0), Col(0)), CellPos(Row(3), Col(0))])

    def test_twoCorners(self):
        path = [CellPos(Row(0), Col(0)), CellPos(Row(0), Col(1)), CellPos(Row(0), Col(2)),
                CellPos(Row(1), Col(2)), CellPos(Row(1), Col(3))]
        newPath = compressPath(path)
        self.assertListEqual(newPath,
                             [CellPos(Row(0), Col(0)), CellPos(Row(0), Col(2)), CellPos(Row(1), Col(2)),
                              CellPos(Row(1), Col(3))])
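TestCompressPath pins down the contract: both endpoints survive, and interior cells survive only where the direction changes. A reference sketch consistent with those cases (not the repository's implementation):

def compress_path_sketch(path):
    # Keep the endpoints plus every corner cell where direction changes.
    if len(path) <= 2:
        return list(path)
    out = [path[0]]
    for prev, cur, nxt in zip(path, path[1:], path[2:]):
        if ((cur.row - prev.row, cur.col - prev.col)
                != (nxt.row - cur.row, nxt.col - cur.col)):
            out.append(cur)
    out.append(path[-1])
    return out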
| 41.16129
| 104
| 0.613636
| 1,061
| 8,932
| 5.132894
| 0.102733
| 0.108336
| 0.070694
| 0.089974
| 0.864304
| 0.844106
| 0.82097
| 0.812707
| 0.777084
| 0.771025
| 0
| 0.041691
| 0.248097
| 8,932
| 216
| 105
| 41.351852
| 0.769208
| 0.028101
| 0
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.26875
| 1
| 0.13125
| false
| 0
| 0.0375
| 0.00625
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4ac3ca9d9755d6fe5e8ceead186b8b9dfc7deabd
| 24,475
|
py
|
Python
|
furnace/seg_opr/loss_opr.py
|
akinoriosamura/TorchSeg-mirror
|
34033fe85fc24015bcef7a92aad39d2a25a001a5
|
[
"MIT"
] | null | null | null |
furnace/seg_opr/loss_opr.py
|
akinoriosamura/TorchSeg-mirror
|
34033fe85fc24015bcef7a92aad39d2a25a001a5
|
[
"MIT"
] | 1
|
2021-06-08T20:36:43.000Z
|
2021-06-08T20:36:43.000Z
|
furnace/seg_opr/loss_opr.py
|
akinoriosamura/TorchSeg-mirror
|
34033fe85fc24015bcef7a92aad39d2a25a001a5
|
[
"MIT"
] | null | null | null |
import numpy as np
import scipy.ndimage as nd
import torch
import torch.nn as nn
import torch.nn.functional as F
from engine.logger import get_logger
from seg_opr.seg_oprs import one_hot
logger = get_logger()
class SigmoidFocalLoss(nn.Module):
    def __init__(self, ignore_label, gamma=2.0, alpha=0.25,
                 reduction='mean'):
        super(SigmoidFocalLoss, self).__init__()
        self.ignore_label = ignore_label
        self.gamma = gamma
        self.alpha = alpha
        self.reduction = reduction

    def forward(self, pred, target):
        b, h, w = target.size()
        pred = pred.view(b, -1, 1)
        pred_sigmoid = pred.sigmoid()
        target = target.view(b, -1).float()
        mask = (target.ne(self.ignore_label)).float()
        target = mask * target
        onehot = target.view(b, -1, 1)
        # TODO: use the pred instead of pred_sigmoid
        max_val = (-pred_sigmoid).clamp(min=0)
        pos_part = (1 - pred_sigmoid) ** self.gamma * (
                pred_sigmoid - pred_sigmoid * onehot)
        neg_part = pred_sigmoid ** self.gamma * (max_val + (
                (-max_val).exp() + (-pred_sigmoid - max_val).exp()).log())
        loss = -(self.alpha * pos_part + (1 - self.alpha) * neg_part).sum(
            dim=-1) * mask
        if self.reduction == 'mean':
            loss = loss.mean()
        return loss
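# Illustrative usage sketch (added; not part of the original file).
def _sigmoid_focal_loss_demo():
    # One logit per pixel, reshaped internally to (b, h*w, 1); targets are
    # integer {0, 1} labels with 255 marking ignored pixels.
    criterion = SigmoidFocalLoss(ignore_label=255)
    pred = torch.randn(2, 32, 32)
    target = torch.randint(0, 2, (2, 32, 32))
    return criterion(pred, target)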
class ProbOhemCrossEntropy2d(nn.Module):
    def __init__(self, ignore_label, reduction='mean', thresh=0.6, min_kept=256,
                 down_ratio=1, use_weight=False):
        super(ProbOhemCrossEntropy2d, self).__init__()
        self.ignore_label = ignore_label
        self.thresh = float(thresh)
        self.min_kept = int(min_kept)
        self.down_ratio = down_ratio
        if use_weight:
            weight = torch.FloatTensor(
                [1.4297, 1.4805, 1.4363, 3.365, 2.6635, 1.4311, 2.1943, 1.4817,
                 1.4513, 2.1984, 1.5295, 1.6892, 3.2224, 1.4727, 7.5978, 9.4117,
                 15.2588, 5.6818, 2.2067])
            self.criterion = torch.nn.CrossEntropyLoss(reduction=reduction,
                                                       weight=weight,
                                                       ignore_index=ignore_label)
        else:
            self.criterion = torch.nn.CrossEntropyLoss(reduction=reduction,
                                                       ignore_index=ignore_label)

    def forward(self, pred, target):
        b, c, h, w = pred.size()
        target = target.view(-1)
        valid_mask = target.ne(self.ignore_label)
        target = target * valid_mask.long()
        num_valid = valid_mask.sum()
        prob = F.softmax(pred, dim=1)
        prob = (prob.transpose(0, 1)).reshape(c, -1)
        if self.min_kept > num_valid:
            logger.info('Labels: {}'.format(num_valid))
        elif num_valid > 0:
            prob = prob.masked_fill_(1 - valid_mask, 1)
            mask_prob = prob[
                target, torch.arange(len(target), dtype=torch.long)]
            threshold = self.thresh
            if self.min_kept > 0:
                _, index = torch.sort(mask_prob)
                threshold_index = index[min(len(index), self.min_kept) - 1]
                if mask_prob[threshold_index] > self.thresh:
                    threshold = mask_prob[threshold_index]
                kept_mask = mask_prob.le(threshold)
                target = target * kept_mask.long()
                valid_mask = valid_mask * kept_mask
                # logger.info('Valid Mask: {}'.format(valid_mask.sum()))
        target = target.masked_fill_(1 - valid_mask, self.ignore_label)
        target = target.view(b, h, w)
        return self.criterion(pred, target)
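# Illustrative usage sketch (added; not part of the original file). Note it
# assumes the PyTorch generation this file targets, where ne() returns uint8
# masks so expressions like `1 - valid_mask` are valid.
def _prob_ohem_demo():
    criterion = ProbOhemCrossEntropy2d(ignore_label=255, thresh=0.7,
                                       min_kept=100)
    pred = torch.randn(2, 19, 32, 32)           # 19-class logits
    target = torch.randint(0, 19, (2, 32, 32))  # ground-truth labels
    return criterion(pred, target)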
class PiecewiseProbOhemCrossEntropy2d(nn.Module):
def __init__(self, ignore_label, reduction='mean', thresh=0.6, min_kept=256,
down_ratio=1, use_weight=False, epoch_thresh=None):
super(PiecewiseProbOhemCrossEntropy2d, self).__init__()
self.ignore_label = ignore_label
self.thresh = float(thresh)
self.min_kept = int(min_kept)
self.down_ratio = down_ratio
self.epoch_thresh = epoch_thresh
if use_weight:
weight = torch.FloatTensor(
[1.4297, 1.4805, 1.4363, 3.365, 2.6635, 1.4311, 2.1943, 1.4817,
1.4513, 2.1984, 1.5295, 1.6892, 3.2224, 1.4727, 7.5978, 9.4117,
15.2588, 5.6818, 2.2067])
self.criterion = torch.nn.CrossEntropyLoss(reduction=reduction,
weight=weight,
ignore_index=ignore_label)
else:
self.criterion = torch.nn.CrossEntropyLoss(reduction=reduction,
ignore_index=ignore_label)
def forward(self, pred, target, num_epoch):
b, c, h, w = pred.size()
target = target.view(-1)
valid_mask = target.ne(self.ignore_label)
target = target * valid_mask.long()
num_valid = valid_mask.sum()
if num_epoch > self.epoch_thresh:
prob = F.softmax(pred, dim=1)
prob = (prob.transpose(0, 1)).reshape(c, -1)
if self.min_kept > num_valid:
logger.info('Labels: {}'.format(num_valid))
elif num_valid > 0:
prob = prob.masked_fill_(1 - valid_mask, 1)
mask_prob = prob[
target, torch.arange(len(target), dtype=torch.long)]
threshold = self.thresh
if self.min_kept > 0:
index = mask_prob.argsort()
threshold_index = index[min(len(index), self.min_kept) - 1]
if mask_prob[threshold_index] > self.thresh:
threshold = mask_prob[threshold_index]
kept_mask = mask_prob.le(threshold)
target = target * kept_mask.long()
valid_mask = valid_mask * kept_mask
# logger.info('Valid Mask: {}'.format(valid_mask.sum()))
target = target.masked_fill_(1 - valid_mask, self.ignore_label)
target = target.view(b, h, w)
return self.criterion(pred, target)
class FocalProb(nn.Module):
def __init__(self, ignore_label, reduction='mean', thresh=0.6, min_kept=256,
gamma=2):
super(FocalProb, self).__init__()
self.ignore_label = ignore_label
self.thresh = float(thresh)
self.min_kept = int(min_kept)
self.gamma = gamma
self.criterion = torch.nn.CrossEntropyLoss(reduction=reduction,
ignore_index=ignore_label)
def forward(self, pred, target):
b, c, h, w = pred.size()
target = target.view(-1)
valid_mask = target.ne(self.ignore_label)
target = target * valid_mask.long()
num_valid = valid_mask.sum()
prob = F.softmax(pred, dim=1)
prob = (prob.transpose(0, 1)).reshape(c, -1)
if self.min_kept > num_valid:
logger.info('Labels: {}'.format(num_valid))
elif num_valid > 0:
prob = prob.masked_fill_(1 - valid_mask, 1)
mask_prob = prob[
target, torch.arange(len(target), dtype=torch.long)]
threshold = self.thresh
if self.min_kept > 0:
index = mask_prob.argsort()
threshold_index = index[min(len(index), self.min_kept) - 1]
if mask_prob[threshold_index] > self.thresh:
threshold = mask_prob[threshold_index]
kept_mask = mask_prob.le(threshold)
target = target * kept_mask.long()
valid_mask = valid_mask * kept_mask
# logger.info('Valid Mask: {}'.format(valid_mask.sum()))
target = target.masked_fill_(1 - valid_mask, self.ignore_label)
target = target.view(b, h, w)
return self.criterion(pred, target)
class AutoOhemCrossEntropy2d(nn.Module):
def __init__(self, ignore_label, reduction='mean', drop_ratio=0.3):
super(AutoOhemCrossEntropy2d, self).__init__()
self.ignore_label = ignore_label
self.drop_ratio = float(drop_ratio)
self.criterion = torch.nn.CrossEntropyLoss(reduction=reduction,
ignore_index=ignore_label)
def forward(self, pred, target):
b, c, h, w = pred.size()
target = target.view(-1)
valid_mask = target.ne(self.ignore_label)
target = target * valid_mask.long()
prob = F.softmax(pred, dim=1)
prob = prob.view(b, c, -1)
similarity = torch.matmul(prob.permute(0, 2, 1), prob)
similarity = torch.sum(similarity, dim=2) / (h * w)
sorted_similarity, _ = torch.sort(similarity, dim=1, descending=True)
prob_threshold = sorted_similarity[:,
int(h * w * self.drop_ratio)].view(b, 1)
kept_mask = similarity.lt(prob_threshold).view(-1)
valid_mask = valid_mask * kept_mask
target = target.masked_fill_(1 - valid_mask, self.ignore_label)
target = target.view(b, h, w)
return self.criterion(pred, target)
class PriorLoss(nn.Module):
def __init__(self, scale, num_class, ignore_index):
super(PriorLoss, self).__init__()
self.scale = scale
self.num_class = num_class
self.ignore_index = ignore_index
self.criterion = torch.nn.BCELoss(reduction='none')
def forward(self, pred, target):
b, h, w = target.size()
scaled_gts = F.interpolate((target.view(b, 1, h, w)).float(),
scale_factor=self.scale,
mode="nearest")
valid_mask = torch.ones_like(scaled_gts)
valid_mask[scaled_gts == self.ignore_index] = 0
valid_vector = valid_mask.view(b, -1, 1)
valid_mask = torch.bmm(valid_vector, valid_vector.permute(0, 2, 1))
scaled_gts[scaled_gts == self.ignore_index] = self.num_class
scaled_gts = scaled_gts.squeeze_()
C = self.num_class + 1
one_hot_gts = one_hot(scaled_gts, C).view(b, C, -1)
similarity_gts = torch.bmm(one_hot_gts.permute(0, 2, 1),
one_hot_gts)
bce_loss = self.criterion(pred, similarity_gts)
num_valid = valid_mask.sum()
num_valid = torch.where(num_valid > 0, num_valid,
torch.ones(1, device=num_valid.device))
bce_loss = valid_mask * bce_loss
bce_loss = bce_loss.sum() / num_valid
valid_vector = valid_vector.view(b, -1)
num_valid = valid_vector.sum()
num_valid = torch.where(num_valid > 0, num_valid,
torch.ones(1, device=num_valid.device))
vtarget = similarity_gts * valid_mask
precision_part = torch.sum(pred * vtarget, dim=2)
denominator = torch.sum(pred, dim=2)
denominator = denominator.masked_fill_(1 - (denominator > 0), 1)
precision_part = precision_part.div_(denominator)
precision_label = torch.ones_like(precision_part)
precision_loss = self.criterion(precision_part, precision_label)
precision_loss = valid_vector * precision_loss
precision_loss = precision_loss.sum() / num_valid
recall_part = torch.sum(pred * vtarget, dim=2)
denominator = torch.sum(vtarget, dim=2)
denominator = denominator.masked_fill_(1 - (denominator > 0), 1)
recall_part = recall_part.div_(denominator)
recall_label = torch.ones_like(recall_part)
recall_loss = self.criterion(recall_part, recall_label)
recall_loss = valid_vector * recall_loss
recall_loss = recall_loss.sum() / num_valid
vtarget = (1 - similarity_gts) * valid_mask
spec_part = torch.sum((1 - pred) * vtarget, dim=2)
denominator = torch.sum(vtarget, dim=2)
denominator = denominator.masked_fill_(1 - (denominator > 0), 1)
spec_part = spec_part.div_(denominator)
spec_label = torch.ones_like(spec_part)
spec_loss = self.criterion(spec_part, spec_label)
spec_loss = valid_vector * spec_loss
spec_loss = spec_loss.sum() / num_valid
loss = bce_loss + recall_loss + spec_loss + precision_loss
return loss
class MaskBCELoss(nn.Module):
def __init__(self, mask, reduction='mean'):
super(MaskBCELoss, self).__init__()
self.mask = mask
self.criterion = torch.nn.BCELoss(reduction='none')
self.reduction = reduction
def forward(self, pred, target):
original_loss = self.criterion(pred, target)
self.mask = self.mask.to(original_loss.get_device())
num_valid = self.mask.sum()
loss = self.mask * original_loss
if self.reduction == 'mean':
loss = loss.sum() / num_valid
return loss
class DiceLoss(nn.Module):
def __init__(self, smooth=1e-5):
super(DiceLoss, self).__init__()
self.smooth = smooth
def forward(self, input, target):
iflat = input.view(-1)
tflat = target.view(-1)
iou = (iflat * tflat).sum()
negtive_iou = ((1 - iflat) * (1 - tflat)).sum()
score = 1 - ((2. * iou + self.smooth) /
(iflat.sum() + tflat.sum() + self.smooth)) - (
(negtive_iou + self.smooth) / (
2 - iflat.sum() - tflat.sum() + self.smooth))
score /= input.size(0)
return score
class DiceLossv2(nn.Module):
def __init__(self, smooth=1e-5):
super(DiceLossv2, self).__init__()
self.smooth = smooth
def forward(self, input, target):
iflat = input.view(-1)
tflat = target.view(-1)
iou = (iflat * tflat).sum()
negtive_iou = ((1 - iflat) * (1 - tflat)).sum()
score = 1 - ((3 * iou * negtive_iou + self.smooth) / (
(negtive_iou * (iflat.sum() + tflat.sum())) + iou * (
(1 - iflat).sum()) + self.smooth))
#
# score = 1 - ((2. * iou + self.smooth) /
# (iflat.sum() + tflat.sum() + self.smooth)) - (
# (negtive_iou + self.smooth) / (
# 2 - iflat.sum() - tflat.sum() + self.smooth))
score /= input.size(0)
return score
class AntimagnetLoss(nn.Module):
def __init__(self, reduction='mean'):
super(AntimagnetLoss, self).__init__()
self.reduction = reduction
self.criterion = torch.nn.BCELoss(reduction=reduction)
def forward(self, pred, target):
diagonal_matrix = (1 - torch.eye(target.size(1))).to(
target.get_device())
vtarget = diagonal_matrix * target
attract_part = torch.sum(pred * vtarget, dim=2)
denominator = torch.sum(vtarget, dim=2)
denominator = denominator.masked_fill_(1 - (denominator > 0), 1)
attract_part = attract_part.div_(denominator)
attract_label = torch.ones_like(attract_part)
attract_loss = self.criterion(attract_part, attract_label)
repel_part = torch.sum((1 - pred) * (1 - target), dim=2)
denominator = torch.sum(1 - target, dim=2)
denominator = denominator.masked_fill_(1 - (denominator > 0), 1)
repel_part = repel_part.div_(denominator)
repel_label = torch.ones_like(repel_part)
repel_loss = self.criterion(repel_part, repel_label)
loss = attract_loss + repel_loss
return loss
class AntimagnetLossv2(nn.Module):
def __init__(self, reduction='mean'):
super(AntimagnetLossv2, self).__init__()
self.reduction = reduction
self.criterion = torch.nn.BCELoss(reduction=reduction)
def forward(self, pred, target):
        diagonal_matrix = (1 - torch.eye(target.size(1))).to(target.device)
        vtarget = diagonal_matrix * target
        attract_part = torch.sum(pred * vtarget, dim=2)
        denominator = torch.sum(vtarget, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
        attract_part = attract_part.div(denominator)
        attract_label = torch.ones_like(attract_part)
        attract_loss = self.criterion(attract_part, attract_label)
        repel_part = torch.sum((1 - pred) * (1 - target), dim=2)
        denominator = torch.sum(1 - target, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
repel_part = repel_part.div(denominator)
repel_label = torch.ones_like(repel_part)
repel_loss = self.criterion(repel_part, repel_label)
        # Interaction term: push the attract and repel scores to agree in magnitude.
        interact_part = 1 - torch.abs(attract_part - repel_part)
interact_label = torch.ones_like(interact_part)
interact_loss = self.criterion(interact_part, interact_label)
loss = attract_loss + repel_loss + interact_loss
return loss
class AntimagnetLossv3(nn.Module):
def __init__(self, reduction='mean'):
super(AntimagnetLossv3, self).__init__()
self.reduction = reduction
self.criterion = torch.nn.BCELoss(reduction=reduction)
    def forward(self, pred, target):
        diagonal_matrix = (1 - torch.eye(target.size(1))).to(target.device)
        vtarget = diagonal_matrix * target
        # Hard-example mining on the attract side: per row, drop roughly the
        # top 30% highest-scoring positives and keep the rest (the harder ones).
        attract_part = pred * vtarget
        base_count = (torch.sum(vtarget, dim=2, keepdim=True) * 0.3).long()
        base_prob, _ = torch.sort(attract_part, dim=2, descending=True)
        base_prob = base_prob.gather(dim=2, index=base_count)
        attract_mask = torch.le(attract_part, base_prob).float() * vtarget
        attract_part *= attract_mask
        attract_part = torch.sum(attract_part, dim=2)
        denominator = torch.sum(attract_mask, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
        attract_part = attract_part.div(denominator)
        attract_label = torch.ones_like(attract_part)
        attract_loss = self.criterion(attract_part, attract_label)
        # The same mining scheme on the repel side, over negative pairs.
        repel_part = (1 - pred) * (1 - target)
        base_count = (torch.sum(1 - target, dim=2, keepdim=True) * 0.3).long()
        base_prob, _ = torch.sort(repel_part, dim=2, descending=True)
        base_prob = base_prob.gather(dim=2, index=base_count)
        repel_mask = torch.le(repel_part, base_prob).float() * (1 - target)
        repel_part *= repel_mask
        repel_part = torch.sum(repel_part, dim=2)
        denominator = torch.sum(repel_mask, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
        repel_part = repel_part.div(denominator)
        repel_label = torch.ones_like(repel_part)
        repel_loss = self.criterion(repel_part, repel_label)
        loss = attract_loss + repel_loss
        return loss
class AntimagnetLossv4(nn.Module):
def __init__(self, reduction='mean'):
super(AntimagnetLossv4, self).__init__()
self.reduction = reduction
self.criterion = torch.nn.BCELoss(reduction=reduction)
def forward(self, pred, target):
        diagonal_matrix = (1 - torch.eye(target.size(1))).to(target.device)
vtarget = diagonal_matrix * target
attract_part = torch.sum(pred * vtarget)
denominator = torch.sum(vtarget)
denominator = 1 if denominator == 0 else denominator
attract_part = attract_part.div(denominator)
attract_label = torch.ones_like(attract_part)
attract_loss = self.criterion(attract_part, attract_label)
repel_part = torch.sum((1 - pred) * (1 - target))
denominator = torch.sum(1 - target)
denominator = 1 if denominator == 0 else denominator
repel_part = repel_part.div(denominator)
repel_label = torch.ones_like(repel_part)
repel_loss = self.criterion(repel_part, repel_label)
loss = attract_loss + repel_loss
return loss
class AntimagnetLossv5(nn.Module):
def __init__(self, reduction='mean'):
super(AntimagnetLossv5, self).__init__()
self.reduction = reduction
self.criterion = torch.nn.BCELoss(reduction=reduction)
def forward(self, pred, target):
        diagonal_matrix = (1 - torch.eye(target.size(1))).to(target.device)
        vtarget = diagonal_matrix * target
        attract_part = torch.sum(pred * vtarget, dim=2)
        denominator = torch.sum(vtarget, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
        attract_part = attract_part.div(denominator)
        attract_label = torch.ones_like(attract_part)
        attract_loss = self.criterion(attract_part, attract_label)
        # Hinge at 0.5: an element only adds to the repel score once its
        # (1 - pred) confidence exceeds 0.5.
        repel_part = (1 - pred) * (1 - target)
        repel_part = torch.max(repel_part - 0.5, torch.zeros_like(repel_part))
        repel_part = torch.sum(repel_part, dim=2)
        denominator = torch.sum(1 - target, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
repel_part = repel_part.div(denominator)
repel_label = torch.ones_like(repel_part)
repel_loss = self.criterion(repel_part, repel_label)
loss = attract_loss + repel_loss
return loss
class AntimagnetLossv6(nn.Module):
def __init__(self, reduction='mean'):
super(AntimagnetLossv6, self).__init__()
self.reduction = reduction
self.criterion = torch.nn.BCELoss(reduction=reduction)
def forward(self, pred, target):
        diagonal_matrix = (1 - torch.eye(target.size(1))).to(target.device)
        vtarget = diagonal_matrix * target
        # Recall-style term: mean prediction over positive pairs.
        recall_part = torch.sum(pred * vtarget, dim=2)
        denominator = torch.sum(vtarget, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
        recall_part = recall_part.div_(denominator)
        recall_label = torch.ones_like(recall_part)
        recall_loss = self.criterion(recall_part, recall_label)
        # Specificity-style term: mean (1 - prediction) over negative pairs.
        spec_part = torch.sum((1 - pred) * (1 - target), dim=2)
        denominator = torch.sum(1 - target, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
        spec_part = spec_part.div_(denominator)
        spec_label = torch.ones_like(spec_part)
        spec_loss = self.criterion(spec_part, spec_label)
        # Precision-style term: positive mass over total predicted mass.
        precision_part = torch.sum(pred * vtarget, dim=2)
        denominator = torch.sum(pred, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
precision_part = precision_part.div_(denominator)
precision_label = torch.ones_like(precision_part)
precision_loss = self.criterion(precision_part, precision_label)
loss = recall_loss + spec_loss + precision_loss
return loss
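# Note: AntimagnetLossv7 below is v6 plus a plain element-wise BCE term on the
# raw similarity map, so the per-row recall/specificity/precision terms are
# regularized by a dense pixel-level signal.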
class AntimagnetLossv7(nn.Module):
def __init__(self, reduction='mean'):
super(AntimagnetLossv7, self).__init__()
self.reduction = reduction
self.criterion = torch.nn.BCELoss(reduction=reduction)
def forward(self, pred, target):
bce_loss = self.criterion(pred, target)
        diagonal_matrix = (1 - torch.eye(target.size(1))).to(target.device)
        vtarget = diagonal_matrix * target
        recall_part = torch.sum(pred * vtarget, dim=2)
        denominator = torch.sum(vtarget, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
        recall_part = recall_part.div_(denominator)
        recall_label = torch.ones_like(recall_part)
        recall_loss = self.criterion(recall_part, recall_label)
        spec_part = torch.sum((1 - pred) * (1 - target), dim=2)
        denominator = torch.sum(1 - target, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
        spec_part = spec_part.div_(denominator)
        spec_label = torch.ones_like(spec_part)
        spec_loss = self.criterion(spec_part, spec_label)
        precision_part = torch.sum(pred * vtarget, dim=2)
        denominator = torch.sum(pred, dim=2)
        denominator = denominator.masked_fill_(denominator == 0, 1)
precision_part = precision_part.div_(denominator)
precision_label = torch.ones_like(precision_part)
precision_loss = self.criterion(precision_part, precision_label)
loss = bce_loss + recall_loss + spec_loss + precision_loss
return loss
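# A small smoke-test sketch, assuming (batch, N, N) similarity maps with
# values in [0, 1]; it only checks that every Antimagnet variant returns a
# finite scalar under these assumed inputs.
def _smoke_test_antimagnet_variants():
    torch.manual_seed(0)
    pred = torch.rand(2, 8, 8)
    target = (torch.rand(2, 8, 8) > 0.5).float()
    losses = [AntimagnetLoss(), AntimagnetLossv2(), AntimagnetLossv3(),
              AntimagnetLossv4(), AntimagnetLossv5(), AntimagnetLossv6(),
              AntimagnetLossv7()]
    return [float(fn(pred, target)) for fn in losses]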
| 40.122951
| 81
| 0.60617
| 2,972
| 24,475
| 4.750336
| 0.066622
| 0.038674
| 0.036124
| 0.025499
| 0.798413
| 0.77681
| 0.762998
| 0.749398
| 0.71887
| 0.711149
| 0
| 0.02663
| 0.281961
| 24,475
| 609
| 82
| 40.188834
| 0.776716
| 0.018345
| 0
| 0.702083
| 0
| 0
| 0.004372
| 0
| 0
| 0
| 0
| 0.001642
| 0
| 1
| 0.066667
| false
| 0
| 0.014583
| 0
| 0.147917
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4361210dec81b69d21208a86186f6252ab59c80a
| 54
|
py
|
Python
|
tests/core/test_import.py
|
pipermerriam/eth-orm
|
b7353e24357f133ae5cf63727cace92fd45d1867
|
[
"MIT"
] | 1
|
2021-01-16T08:54:04.000Z
|
2021-01-16T08:54:04.000Z
|
tests/core/test_import.py
|
pipermerriam/eth-orm
|
b7353e24357f133ae5cf63727cace92fd45d1867
|
[
"MIT"
] | null | null | null |
tests/core/test_import.py
|
pipermerriam/eth-orm
|
b7353e24357f133ae5cf63727cace92fd45d1867
|
[
"MIT"
] | null | null | null |
def test_import():
import eth_orm # noqa: F401
| 10.8
| 32
| 0.648148
| 8
| 54
| 4.125
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075
| 0.259259
| 54
| 4
| 33
| 13.5
| 0.75
| 0.185185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 1
| 0
| 1.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4367883ef8a85f3ebd2c8fc60711a24b5eaa87d3
| 9,681
|
py
|
Python
|
tests/test_juniper_legacy.py
|
bkresoja/splunk-connect-for-syslog
|
2710ad9d7c7881ceef7cb57577311e819138ed34
|
[
"BSD-2-Clause",
"CC0-1.0"
] | null | null | null |
tests/test_juniper_legacy.py
|
bkresoja/splunk-connect-for-syslog
|
2710ad9d7c7881ceef7cb57577311e819138ed34
|
[
"BSD-2-Clause",
"CC0-1.0"
] | null | null | null |
tests/test_juniper_legacy.py
|
bkresoja/splunk-connect-for-syslog
|
2710ad9d7c7881ceef7cb57577311e819138ed34
|
[
"BSD-2-Clause",
"CC0-1.0"
] | null | null | null |
# Copyright 2019 Splunk, Inc.
#
# Use of this source code is governed by a BSD-2-clause-style
# license that can be found in the LICENSE-BSD2 file or at
# https://opensource.org/licenses/BSD-2-Clause
from jinja2 import Environment
from .sendmessage import *
from .splunkutils import *
env = Environment(extensions=['jinja2_time.TimeExtension'])
# <134> Aug 02 14:45:04 10.0.0.1 65.197.254.193 20090320, 17331, 2009/03/20 14:47:45, 2009/03/20 14:47:50, global, 53, [FW NAME], [FW IP], traffic, traffic log, trust, (NULL), 10.1.1.20, 1725, 82.2.19.2, 2383, untrust, (NULL), 84.5.78.4, 80, 84.53.178.64, 80, tcp, global, 53, [FW NAME], fw/vpn, 4, accepted, info, no, Creation, (NULL), (NULL), (NULL), 0, 0, 0, 0, 0, 0, 0, 1, no, 0, Not Set, sos
def test_juniper_nsm_standard(record_property, setup_wordlist, get_host_key, setup_splunk, setup_sc4s):
host = get_host_key
mt = env.from_string(
"{{ mark }} {% now 'local', '%b %d %H:%M:%S' %} jnpnsm-{{ host }} 65.197.254.193 20090320, 17331, 2009/03/20 14:47:45, 2009/03/20 14:47:50, global, 53, [FW NAME], [FW IP], traffic, traffic log, trust, (NULL), 10.1.1.20, 1725, 82.2.19.2, 2383, untrust, (NULL), 84.5.78.4, 80, 84.53.178.64, 80, tcp, global, 53, [FW NAME], fw/vpn, 4, accepted, info, no, Creation, (NULL), (NULL), (NULL), 0, 0, 0, 0, 0, 0, 0, 1, no, 0, Not Set, sos")
message = mt.render(mark="<134>", host=host)
sendsingle(message, setup_sc4s[0], setup_sc4s[1][514])
st = env.from_string("search earliest=-1m@m latest=+1m@m index=netfw host=\"jnpnsm-{{ host }}\" sourcetype=\"juniper:nsm\" | head 2")
search = st.render(host=host)
resultCount, eventCount = splunk_single(setup_splunk, search)
record_property("host", host)
record_property("resultCount", resultCount)
record_property("message", message)
assert resultCount == 1
# THE LOG SAMPLE BELOW IS IMPLIED FROM THE JUNIPER DOCS; need to obtain a real sample.
# <134> Aug 02 14:45:04 10.0.0.1 65.197.254.193 20090320, 17331, 2009/03/20 14:47:45, 2009/03/20 14:47:50, global, 53, [IDP NAME], [IDP IP], predefined, rule, trust, (NULL), 10.1.1.20, 1725, 82.2.19.2, 2383, untrust, (NULL), 84.5.78.4, 80, 84.53.178.64, 80, tcp, global, 53, [IDP NAME], fw/vpn, 4, accepted, info, no, Creation, (NULL), (NULL), (NULL), 0, 0, 0, 0, 0, 0, 0, 1, no, 0, Not Set, sos
def test_juniper_nsm_idp_standard(record_property, setup_wordlist, get_host_key, setup_splunk, setup_sc4s):
host = get_host_key
mt = env.from_string(
"{{ mark }} {% now 'local', '%b %d %H:%M:%S' %} jnpnsmidp-{{ host }} 65.197.254.193 20090320, 17331, 2009/03/20 14:47:45, 2009/03/20 14:47:50, global, 53, [IDP NAME], [IDP IP], predefined, rule, trust, (NULL), 10.1.1.20, 1725, 82.2.19.2, 2383, untrust, (NULL), 84.5.78.4, 80, 84.53.178.64, 80, tcp, global, 53, [IDP NAME], fw/vpn, 4, accepted, info, no, Creation, (NULL), (NULL), (NULL), 0, 0, 0, 0, 0, 0, 0, 1, no, 0, Not Set, sos")
message = mt.render(mark="<134>", host=host)
sendsingle(message, setup_sc4s[0], setup_sc4s[1][514])
st = env.from_string("search earliest=-1m@m latest=+1m@m index=netids host=\"jnpnsmidp-{{ host }}\" sourcetype=\"juniper:nsm:idp\" | head 2")
search = st.render(host=host)
resultCount, eventCount = splunk_single(setup_splunk, search)
record_property("host", host)
record_property("resultCount", resultCount)
record_property("message", message)
assert resultCount == 1
# <23> Apr 24 12:30:05 cs-loki3 RT_IDP: IDP_ATTACK_LOG_EVENT: IDP: at 1303673404, ANOMALY Attack log <64.1.2.1/48397->198.87.233.110/80> for TCP protocol and service HTTP application NONE by rule 3 of rulebase IPS in policy Recommended. attack: repeat=0, action=DROP, threat-severity=HIGH, name=HTTP:INVALID:MSNG-HTTP-VER, NAT <46.0.3.254:55870->0.0.0.0:0>, time-elapsed=0, inbytes=0, outbytes=0, inpackets=0, outpackets=0, intf:trust:fe-0/0/2.0->untrust:fe-0/0/3.0, packet-log-id: 0 and misc-message -
# <23> Mar 18 17:56:52 [FW IP] [FW Model]: NetScreen device_id=netscreen2 [Root]system-notification-00257(traffic): start_time="2009-03-18 16:07:06" duration=0 policy_id=320001 service=msrpc Endpoint Mapper(tcp) proto=6 src zone=Null dst zone=self action=Deny sent=0 rcvd=16384 src=21.10.90.125 dst=23.16.1.1
def test_juniper_netscreen_fw(record_property, setup_wordlist, get_host_key, setup_splunk, setup_sc4s):
host = get_host_key
mt = env.from_string(
"{{ mark }} {% now 'local', '%b %d %H:%M:%S' %} jnpns-{{ host }} ns204: NetScreen device_id=netscreen2 [Root]system-notification-00257(traffic): start_time=\"2009-03-18 16:07:06\" duration=0 policy_id=320001 service=msrpc Endpoint Mapper(tcp) proto=6 src zone=Null dst zone=self action=Deny sent=0 rcvd=16384 src=21.10.90.125 dst=23.16.1.1\n")
message = mt.render(mark="<23>", host=host)
sendsingle(message, setup_sc4s[0], setup_sc4s[1][514])
st = env.from_string("search earliest=-1m@m latest=+1m@m index=netfw host=\"jnpns-{{ host }}\" sourcetype=\"netscreen:firewall\" | head 2")
search = st.render(host=host)
resultCount, eventCount = splunk_single(setup_splunk, search)
record_property("host", host)
record_property("resultCount", resultCount)
record_property("message", message)
assert resultCount == 1
# <165>1 2010-06-23T18:05:55 10.209.83.9 Jnpr Syslog 23414 1 [syslog@juniper.net dayId="20100623" recordId="0" timeRecv="2010/06/23 18:05:55" timeGen="2010/06/23 18:05:51" domain="" devDomVer2="0" device_ip="10.209.83.9" cat="Config" attack="" srcZn="NULL" srcIntf="" srcAddr="0.0.0.0" srcPort="0" natSrcAddr="NULL" natSrcPort="0" dstZn="NULL" dstIntf="NULL" dstAddr="0.0.0.0" dstPort="0" natDstAddr="NULL" natDstPort="0" protocol="IP" ruleDomain="" ruleVer="0" policy="" rulebase="NONE" ruleNo="0" action="NONE" severity="INFO" alert="no" elaspedTime="0" inbytes="0" outbytes="0" totBytes="0" inPak="0" outPak="0" totPak="0" repCount="0" packetData="no" varEnum="0" misc="Interaface eth2,eth3 is in Normal State" user="NULL" app="NULL" uri="NULL"]
# <THIS TEST IS TENTATIVE PENDING A VALID DATA SAMPLE; NEEDED TO OMIT THE "1" IN THIS TEST SAMPLE (BEFORE [] BLOCK) TO GET IT TO PARSE 5424>
# <VALIDATE BEFORE SHIPPING!>
# <THIS TEST MAY NEED TO BE REWRITTEN AS A "STANDARD" TEST IF THE DATA IS ACTUALLY SENT IN 3164 FORMAT>
# @pytest.mark.xfail
def test_juniper_idp_structured(record_property, setup_wordlist, get_host_key, setup_splunk, setup_sc4s):
host = get_host_key
mt = env.from_string(
"{{ mark }} {% now 'utc', '%Y-%m-%dT%H:%M:%S' %}.700Z {{ host }} Jnpr Syslog 23414 [syslog@juniper.net dayId=\"20100623\" recordId=\"0\" timeRecv=\"2010/06/23 18:05:55\" timeGen=\"2010/06/23 18:05:51\" domain=\"\" devDomVer2=\"0\" device_ip=\"10.209.83.9\" cat=\"Config\" attack=\"\" srcZn=\"NULL\" srcIntf=\"\" srcAddr=\"0.0.0.0\" srcPort=\"0\" natSrcAddr=\"NULL\" natSrcPort=\"0\" dstZn=\"NULL\" dstIntf=\"NULL\" dstAddr=\"0.0.0.0\" dstPort=\"0\" natDstAddr=\"NULL\" natDstPort=\"0\" protocol=\"IP\" ruleDomain=\"\" ruleVer=\"0\" policy=\"\" rulebase=\"NONE\" ruleNo=\"0\" action=\"NONE\" severity=\"INFO\" alert=\"no\" elaspedTime=\"0\" inbytes=\"0\" outbytes=\"0\" totBytes=\"0\" inPak=\"0\" outPak=\"0\" totPak=\"0\" repCount=\"0\" packetData=\"no\" varEnum=\"0\" misc=\"Interaface eth2,eth3 is in Normal State\" user=\"NULL\" app=\"NULL\" uri=\"NULL\"]")
message = mt.render(mark="<165>1", host=host)
sendsingle(message, setup_sc4s[0], setup_sc4s[1][514])
st = env.from_string("search earliest=-1m@m latest=+1m@m index=netids host=\"{{ host }}\" sourcetype=\"juniper:idp\" | head 2")
search = st.render(host=host)
resultCount, eventCount = splunk_single(setup_splunk, search)
record_property("host", host)
record_property("resultCount", resultCount)
record_property("message", message)
assert resultCount == 1
# <23> Apr 24 12:30:05 cs-loki3 RT_IDP: IDP_ATTACK_LOG_EVENT: IDP: at 1303673404, ANOMALY Attack log <64.1.2.1/48397->198.87.233.110/80> for TCP protocol and service HTTP application NONE by rule 3 of rulebase IPS in policy Recommended. attack: repeat=0, action=DROP, threat-severity=HIGH, name=HTTP:INVALID:MSNG-HTTP-VER, NAT <46.0.3.254:55870->0.0.0.0:0>, time-elapsed=0, inbytes=0, outbytes=0, inpackets=0, outpackets=0, intf:trust:fe-0/0/2.0->untrust:fe-0/0/3.0, packet-log-id: 0 and misc-message -
# <23> Mar 18 17:56:52 [FW IP] [FW Model]: NetScreen device_id=netscreen2 [Root]system-notification-00257(traffic): start_time="2009-03-18 16:07:06" duration=0 policy_id=320001 service=msrpc Endpoint Mapper(tcp) proto=6 src zone=Null dst zone=self action=Deny sent=0 rcvd=16384 src=21.10.90.125 dst=23.16.1.1
def test_juniper_netscreen_fw_singleport(record_property, setup_wordlist, get_host_key, setup_splunk, setup_sc4s):
host = get_host_key
mt = env.from_string(
"{{ mark }} {% now 'local', '%b %d %H:%M:%S' %} {{ host }} ns204: NetScreen device_id=netscreen2 [Root]system-notification-00257(traffic): start_time=\"2009-03-18 16:07:06\" duration=0 policy_id=320001 service=msrpc Endpoint Mapper(tcp) proto=6 src zone=Null dst zone=self action=Deny sent=0 rcvd=16384 src=21.10.90.125 dst=23.16.1.1 singleport=5000\n")
message = mt.render(mark="<23>", host=host)
sendsingle(message, setup_sc4s[0], setup_sc4s[1][5000])
st = env.from_string("search earliest=-1m@m latest=+1m@m index=netfw host=\"{{ host }}\" sourcetype=\"netscreen:firewall\" | head 2")
search = st.render(host=host)
resultCount, eventCount = splunk_single(setup_splunk, search)
record_property("host", host)
record_property("resultCount", resultCount)
record_property("message", message)
assert resultCount == 1
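# All of the tests above follow the same sc4s pattern: render a syslog line
# from a jinja2 template (the jinja2_time extension supplies the live
# timestamp), send it with sendsingle(), then run a Splunk search scoped to
# the generated host key and assert that exactly one event was indexed.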
| 76.833333
| 869
| 0.688152
| 1,624
| 9,681
| 4.025246
| 0.196429
| 0.015298
| 0.015603
| 0.014686
| 0.875019
| 0.875019
| 0.875019
| 0.875019
| 0.875019
| 0.875019
| 0
| 0.132071
| 0.132631
| 9,681
| 125
| 870
| 77.448
| 0.646421
| 0.384155
| 0
| 0.695652
| 0
| 0.101449
| 0.434724
| 0.018001
| 0
| 0
| 0
| 0
| 0.072464
| 1
| 0.072464
| false
| 0
| 0.043478
| 0
| 0.115942
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
4376094138704e9557fbccd18eadae01200057f6
| 22
|
py
|
Python
|
dmonpoint/__init__.py
|
dice-project/DICE-Anomaly-Detection-Tool
|
a5eeacb9e888348adbe97be0c26a500f2f03ec6f
|
[
"Apache-2.0"
] | 4
|
2017-02-06T15:33:06.000Z
|
2018-05-08T01:43:03.000Z
|
dmonpoint/__init__.py
|
dice-project/DICE-Anomaly-Detection-Tool
|
a5eeacb9e888348adbe97be0c26a500f2f03ec6f
|
[
"Apache-2.0"
] | null | null | null |
dmonpoint/__init__.py
|
dice-project/DICE-Anomaly-Detection-Tool
|
a5eeacb9e888348adbe97be0c26a500f2f03ec6f
|
[
"Apache-2.0"
] | null | null | null |
from adppoint import *
| 22
| 22
| 0.818182
| 3
| 22
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
43af7abdc21c53798382d8ee2c48a950fadb6a80
| 18
|
py
|
Python
|
tests/broke_import.py
|
BachelorForever/FuckitPy
|
1a7295b318816e3cae68f46956710dbcdf5700fe
|
[
"WTFPL"
] | null | null | null |
tests/broke_import.py
|
BachelorForever/FuckitPy
|
1a7295b318816e3cae68f46956710dbcdf5700fe
|
[
"WTFPL"
] | null | null | null |
tests/broke_import.py
|
BachelorForever/FuckitPy
|
1a7295b318816e3cae68f46956710dbcdf5700fe
|
[
"WTFPL"
] | null | null | null |
p=np?
fuck3='123'
| 6
| 11
| 0.611111
| 4
| 18
| 2.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.111111
| 18
| 3
| 11
| 6
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
43b7e04cc18f84ea3ce0ac58b5afc9f05e79458b
| 3,865
|
py
|
Python
|
tests/test_codegen_ppac.py
|
hgyhungry/heterocl
|
4ee0a9345404d808ce939f9c2cb5143392457042
|
[
"Apache-2.0"
] | null | null | null |
tests/test_codegen_ppac.py
|
hgyhungry/heterocl
|
4ee0a9345404d808ce939f9c2cb5143392457042
|
[
"Apache-2.0"
] | null | null | null |
tests/test_codegen_ppac.py
|
hgyhungry/heterocl
|
4ee0a9345404d808ce939f9c2cb5143392457042
|
[
"Apache-2.0"
] | null | null | null |
import heterocl as hcl
import hlib
def test_func_print():
def test_hmm_sim():
hcl.init()
x = hcl.placeholder((1,), 'x', dtype=hcl.UInt(64))
y = hcl.placeholder((64,), 'y', dtype=hcl.UInt(64))
def kernel(X, Y):
return hlib.ppac.hmm_sim(X, Y, name='Z')
s = hcl.create_schedule([x, y], kernel)
f = hcl.build(s, target='rv64_ppac')
code = str(f)
assert 'PPACFunc_HmmSim' in code
def test_gemm_binary():
hcl.init()
data = hcl.placeholder((64, 64), 'd', dtype=hcl.UInt(1))
weight = hcl.placeholder((64, 64), 'w', dtype=hcl.UInt(1))
def kernel(d, w):
return hlib.ppac.gemm_binary(d, w, 'res')
s = hcl.create_schedule([data, weight], kernel)
f = hcl.build(s, target='rv64_ppac')
code = str(f)
assert 'PPACFunc_GeMMBin' in code
def test_gemm_multi_bit_unsigned():
hcl.init()
data = hcl.placeholder((32, 32), 'd', dtype=hcl.UInt(8))
weight = hcl.placeholder((32, 32), 'w', dtype=hcl.UInt(8))
def kernel(d, w):
return hlib.ppac.gemm_multi_bit(d, w, 'res')
s = hcl.create_schedule([data, weight], kernel)
f = hcl.build(s, target='rv64_ppac')
code = str(f)
assert 'PPACFunc_GeMMUInt' in code
def test_gemm_multi_bit_signed():
hcl.init()
data = hcl.placeholder((32, 32), 'd', dtype=hcl.Int(8))
weight = hcl.placeholder((32, 32), 'w', dtype=hcl.Int(8))
def kernel(d, w):
return hlib.ppac.gemm_multi_bit(d, w, 'res')
s = hcl.create_schedule([data, weight], kernel)
f = hcl.build(s, target='rv64_ppac')
code = str(f)
assert 'PPACFunc_GeMMSInt' in code
test_hmm_sim()
test_gemm_binary()
test_gemm_multi_bit_unsigned()
test_gemm_multi_bit_signed()
def test_tile():
def test_hmm_sim():
hcl.init()
b_n = 10
d_n = 256
X = hcl.placeholder((b_n,), 'X', dtype=hcl.UInt(64))
Y = hcl.placeholder((d_n,), 'Y', dtype=hcl.UInt(64))
def kernel(X, Y):
return hlib.ppac.hmm_sim(X, Y, name='Z')
s = hcl.create_schedule([X, Y], kernel)
ir = str(hcl.lower(s))
assert ('\"_batch_num\"=' + str(b_n)) in ir
assert ('\"_in_block_num\"=' + str(1)) in ir
assert ('\"_out_channel_num\"=' + str(d_n)) in ir
def test_gemm_binary():
hcl.init()
b_n, i_c, o_c = 64, 256, 256
ppac_config = hlib.ppac.PPAC_config(multi_bit=False)
data = hcl.placeholder((b_n, i_c), 'd', dtype=hcl.UInt(1))
weight = hcl.placeholder((o_c, i_c), 'w', dtype=hcl.UInt(1))
def kernel(d, w):
return hlib.ppac.gemm_binary(d, w, 'res')
s = hcl.create_schedule([data, weight], kernel)
ir = str(hcl.lower(s))
assert ('\"_batch_num\"=' + str(b_n)) in ir
assert ('\"_in_block_num\"=' + str(i_c // ppac_config.elem_num)) in ir
assert ('\"_out_channel_num\"=' + str(o_c)) in ir
def test_gemm_multi_bit():
hcl.init()
b_n, i_c, o_c = 64, 256, 256
ppac_config = hlib.ppac.PPAC_config(multi_bit=True)
data = hcl.placeholder((b_n, i_c), 'd', dtype=hcl.Int(8))
weight = hcl.placeholder((o_c, i_c), 'w', dtype=hcl.Int(8))
def kernel(d, w):
return hlib.ppac.gemm_multi_bit(d, w, 'res')
s = hcl.create_schedule([data, weight], kernel)
ir = str(hcl.lower(s))
assert ('\"_batch_num\"=' + str(b_n)) in ir
assert ('\"_in_block_num\"=' + str(i_c // ppac_config.elem_num)) in ir
assert ('\"_out_channel_num\"=' + str(o_c)) in ir
test_hmm_sim()
test_gemm_binary()
test_gemm_multi_bit()
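# The tests above share one pattern: declare hcl placeholders, wrap the
# hlib.ppac op in a kernel, build or lower a schedule for the 'rv64_ppac'
# target, and assert that the emitted code or IR carries the expected PPAC
# marker (a PPACFunc_* symbol or the tiling attributes).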
| 39.040404
| 79
| 0.550323
| 572
| 3,865
| 3.498252
| 0.127622
| 0.097951
| 0.05997
| 0.062969
| 0.917541
| 0.872064
| 0.831584
| 0.793603
| 0.74063
| 0.711144
| 0
| 0.027283
| 0.288745
| 3,865
| 99
| 80
| 39.040404
| 0.700618
| 0
| 0
| 0.6
| 0
| 0
| 0.078025
| 0.01672
| 0
| 0
| 0
| 0
| 0.144444
| 1
| 0.177778
| false
| 0
| 0.022222
| 0.077778
| 0.277778
| 0.011111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
78df51de062a404596927ae48aa500e626145597
| 102
|
py
|
Python
|
unpoly/__init__.py
|
thinkwelltwd/unpoly_django
|
2da514e8fbdf254e7dadbe0f73bee62c51aa579b
|
[
"MIT"
] | 4
|
2021-07-03T06:10:36.000Z
|
2022-03-26T02:08:51.000Z
|
unpoly/__init__.py
|
thinkwelltwd/unpoly_django
|
2da514e8fbdf254e7dadbe0f73bee62c51aa579b
|
[
"MIT"
] | null | null | null |
unpoly/__init__.py
|
thinkwelltwd/unpoly_django
|
2da514e8fbdf254e7dadbe0f73bee62c51aa579b
|
[
"MIT"
] | null | null | null |
__version_info__ = __version__ = version = VERSION = '0.1.0'
def get_version():
return version
| 14.571429
| 60
| 0.696078
| 13
| 102
| 4.692308
| 0.538462
| 0.459016
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036585
| 0.196078
| 102
| 6
| 61
| 17
| 0.707317
| 0
| 0
| 0
| 0
| 0
| 0.049505
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
78e4559279858cb86ce9e8dc5bdb7a4cc18a2f39
| 37
|
py
|
Python
|
__init__.py
|
cgons/dbinspector
|
358f5bd091ab29f462654f713adf9f54a180365d
|
[
"MIT"
] | null | null | null |
__init__.py
|
cgons/dbinspector
|
358f5bd091ab29f462654f713adf9f54a180365d
|
[
"MIT"
] | null | null | null |
__init__.py
|
cgons/dbinspector
|
358f5bd091ab29f462654f713adf9f54a180365d
|
[
"MIT"
] | null | null | null |
from .dbinspector import DBInspector
| 18.5
| 36
| 0.864865
| 4
| 37
| 8
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
60214d10d1ecac140607347aa775512421903b74
| 201
|
py
|
Python
|
etiqette/admin.py
|
peterken674/etiqette
|
12437615ae1fcdf2a5e01dd88111880ca8e76776
|
[
"MIT"
] | null | null | null |
etiqette/admin.py
|
peterken674/etiqette
|
12437615ae1fcdf2a5e01dd88111880ca8e76776
|
[
"MIT"
] | null | null | null |
etiqette/admin.py
|
peterken674/etiqette
|
12437615ae1fcdf2a5e01dd88111880ca8e76776
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Profile, Ticket, Cinema, Session
admin.site.register(Profile)
admin.site.register(Ticket)
admin.site.register(Cinema)
admin.site.register(Session)
| 25.125
| 52
| 0.81592
| 28
| 201
| 5.857143
| 0.428571
| 0.219512
| 0.414634
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079602
| 201
| 7
| 53
| 28.714286
| 0.886486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
6030435780d692090427e69c873772954ebdbf26
| 152
|
py
|
Python
|
tests/spec/config.py
|
cfm-art/selenium-docker
|
191e2591db2dfc9fa664ade84451f74d3a43db89
|
[
"MIT"
] | null | null | null |
tests/spec/config.py
|
cfm-art/selenium-docker
|
191e2591db2dfc9fa664ade84451f74d3a43db89
|
[
"MIT"
] | null | null | null |
tests/spec/config.py
|
cfm-art/selenium-docker
|
191e2591db2dfc9fa664ade84451f74d3a43db89
|
[
"MIT"
] | null | null | null |
# coding: utf-8
class Config(object):
def __init__(self):
pass
def initialize(self):
pass
def exit(self):
pass
| 10.857143
| 25
| 0.539474
| 18
| 152
| 4.333333
| 0.666667
| 0.307692
| 0.282051
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0.361842
| 152
| 13
| 26
| 11.692308
| 0.793814
| 0.085526
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0.428571
| 0
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 6
|
60b7ca27a0cf54cf90842fdf815a43d85b60cd5e
| 3,497
|
py
|
Python
|
tests/unordered/substitutor/test_unordered_substitution.py
|
nikitanovosibirsk/district42-exp-types
|
e36e43da62f32d58d4b14c65afa16856dc8849e1
|
[
"Apache-2.0"
] | null | null | null |
tests/unordered/substitutor/test_unordered_substitution.py
|
nikitanovosibirsk/district42-exp-types
|
e36e43da62f32d58d4b14c65afa16856dc8849e1
|
[
"Apache-2.0"
] | 2
|
2021-08-01T05:02:21.000Z
|
2021-08-01T10:06:28.000Z
|
tests/unordered/substitutor/test_unordered_substitution.py
|
nikitanovosibirsk/district42-exp-types
|
e36e43da62f32d58d4b14c65afa16856dc8849e1
|
[
"Apache-2.0"
] | null | null | null |
from typing import Any, List
from unittest.mock import sentinel
import pytest
from baby_steps import given, then, when
from district42 import schema
from pytest import raises
from revolt import substitute
from revolt.errors import SubstitutionError
from district42_exp_types.unordered import unordered_schema
@pytest.mark.parametrize("value", [
[],
[1],
[1, 2],
])
def test_unordered_elements_substitution(value: List[Any]):
with given:
sch = unordered_schema
with when:
res = substitute(sch, value)
with then:
assert res == unordered_schema([substitute(schema.int, x) for x in value])
assert res != sch
def test_unordered_elements_substitution_error():
with given:
sch = unordered_schema
with when, raises(Exception) as exception:
substitute(sch, [sentinel])
with then:
assert exception.type is SubstitutionError
def test_unordered_len_substitution():
with given:
sch = unordered_schema.len(2)
with when:
res = substitute(sch, [1, 2])
with then:
assert res == unordered_schema([schema.int(1), schema.int(2)]).len(2)
assert res != sch
@pytest.mark.parametrize("value", [
[1],
[1, 2, 3],
])
def test_unordered_len_substitution_error(value: List[Any]):
with given:
sch = unordered_schema.len(2)
with when, raises(Exception) as exception:
substitute(sch, value)
with then:
assert exception.type is SubstitutionError
@pytest.mark.parametrize("value", [
[1, 2],
[1, 2, 3],
])
def test_unordered_min_len_substitution(value: Any):
with given:
sch = unordered_schema.len(2, ...)
with when:
res = substitute(sch, value)
with then:
assert res == unordered_schema([substitute(schema.int, x) for x in value]).len(2, ...)
assert res != sch
def test_unordered_min_len_substitution_error():
with given:
sch = unordered_schema.len(2, ...)
with when, raises(Exception) as exception:
substitute(sch, [1])
with then:
assert exception.type is SubstitutionError
@pytest.mark.parametrize("value", [
[],
[1],
[1, 2],
])
def test_unordered_max_len_substitution(value: List[Any]):
with given:
sch = unordered_schema.len(..., 2)
with when:
res = substitute(sch, value)
with then:
assert res == unordered_schema([substitute(schema.int, x) for x in value]).len(..., 2)
assert res != sch
def test_unordered_max_len_substitution_error():
with given:
sch = unordered_schema.len(..., 2)
with when, raises(Exception) as exception:
substitute(sch, [1, 2, 3])
with then:
assert exception.type is SubstitutionError
@pytest.mark.parametrize("value", [
[1],
[1, 2],
[1, 2, 3],
])
def test_unordered_min_max_len_substitution(value: List[Any]):
with given:
sch = unordered_schema.len(1, 3)
with when:
res = substitute(sch, value)
with then:
assert res == unordered_schema([substitute(schema.int, x) for x in value]).len(1, 3)
assert res != sch
@pytest.mark.parametrize("value", [
[],
[1, 2, 3, 4],
])
def test_unordered_min_max_len_substitution_error(value: List[Any]):
with given:
sch = unordered_schema.len(1, 3)
with when, raises(Exception) as exception:
substitute(sch, value)
with then:
assert exception.type is SubstitutionError
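# A recurring assertion pattern above: substitute() returns a new, refined
# schema (hence `res != sch`), while values that violate the declared length
# bounds raise SubstitutionError instead of producing a schema.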
| 22.707792
| 94
| 0.645124
| 446
| 3,497
| 4.919283
| 0.11435
| 0.109389
| 0.072926
| 0.095716
| 0.861896
| 0.838651
| 0.793528
| 0.741568
| 0.684594
| 0.640839
| 0
| 0.019564
| 0.23992
| 3,497
| 153
| 95
| 22.856209
| 0.805869
| 0
| 0
| 0.738739
| 0
| 0
| 0.008579
| 0
| 0
| 0
| 0
| 0
| 0.135135
| 1
| 0.09009
| false
| 0
| 0.081081
| 0
| 0.171171
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
719f86c8822ad5b6835564ee14cf88041a73db3f
| 113
|
py
|
Python
|
ksig/__init__.py
|
tgcsaba/ksig
|
bef68abeddb268d4166b4db0953f8dce9b8c36d1
|
[
"Apache-2.0"
] | 8
|
2021-05-22T14:38:13.000Z
|
2021-07-14T12:44:39.000Z
|
ksig/__init__.py
|
tgcsaba/ksig
|
bef68abeddb268d4166b4db0953f8dce9b8c36d1
|
[
"Apache-2.0"
] | null | null | null |
ksig/__init__.py
|
tgcsaba/ksig
|
bef68abeddb268d4166b4db0953f8dce9b8c36d1
|
[
"Apache-2.0"
] | 2
|
2021-06-03T13:31:41.000Z
|
2021-06-30T10:03:32.000Z
|
from . import algorithms
from . import static
from . import kernels
from . import projections
from . import utils
| 22.6
| 25
| 0.787611
| 15
| 113
| 5.933333
| 0.466667
| 0.561798
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168142
| 113
| 5
| 26
| 22.6
| 0.946809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e0b47f8a0b95c87f03c91eb667040767bd93a10a
| 256
|
py
|
Python
|
rl_utils/__init__.py
|
StuartCHAN/KARL
|
2a4bb39d2db7646f57e66bda7c6694ba33022f76
|
[
"MIT"
] | 1
|
2019-10-13T04:55:14.000Z
|
2019-10-13T04:55:14.000Z
|
rl_utils/__init__.py
|
StuartCHAN/bert_rl_qa
|
2a4bb39d2db7646f57e66bda7c6694ba33022f76
|
[
"MIT"
] | 6
|
2021-04-30T20:56:34.000Z
|
2022-03-12T00:02:12.000Z
|
rl_utils/__init__.py
|
StuartCHAN/bert_rl_qa
|
2a4bb39d2db7646f57e66bda7c6694ba33022f76
|
[
"MIT"
] | 1
|
2021-05-15T02:59:38.000Z
|
2021-05-15T02:59:38.000Z
|
import rl_utils
import rl_utils.kgutils as kgutils
import rl_utils.queries as queries
import rl_utils.ans_reward as ans_reward
import rl_utils.sem_reward as sem_reward
import rl_utils.reward as reward
import rl_utils.generator_utils as generator_utils
| 32
| 51
| 0.859375
| 45
| 256
| 4.6
| 0.222222
| 0.270531
| 0.439614
| 0.275362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117188
| 256
| 7
| 52
| 36.571429
| 0.915929
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
e0db3b43a1dd04f7efacd6d5034667b6b9e89203
| 505,218
|
py
|
Python
|
lib/googlecloudsdk/third_party/apis/aiplatform/v1beta1/aiplatform_v1beta1_client.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | 2
|
2019-11-10T09:17:07.000Z
|
2019-12-18T13:44:08.000Z
|
lib/googlecloudsdk/third_party/apis/aiplatform/v1beta1/aiplatform_v1beta1_client.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | null | null | null |
lib/googlecloudsdk/third_party/apis/aiplatform/v1beta1/aiplatform_v1beta1_client.py
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
2a48a04df14be46c8745050f98768e30474a1aac
|
[
"Apache-2.0"
] | 1
|
2020-07-25T01:40:19.000Z
|
2020-07-25T01:40:19.000Z
|
"""Generated client library for aiplatform version v1beta1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.py import base_api
from googlecloudsdk.third_party.apis.aiplatform.v1beta1 import aiplatform_v1beta1_messages as messages
class AiplatformV1beta1(base_api.BaseApiClient):
"""Generated client library for service aiplatform version v1beta1."""
MESSAGES_MODULE = messages
BASE_URL = 'https://aiplatform.googleapis.com/'
MTLS_BASE_URL = 'https://aiplatform.mtls.googleapis.com/'
_PACKAGE = 'aiplatform'
_SCOPES = ['https://www.googleapis.com/auth/cloud-platform']
_VERSION = 'v1beta1'
_CLIENT_ID = '1042881264118.apps.googleusercontent.com'
_CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_USER_AGENT = 'google-cloud-sdk'
_CLIENT_CLASS_NAME = 'AiplatformV1beta1'
_URL_VERSION = 'v1beta1'
_API_KEY = None
def __init__(self, url='', credentials=None,
get_credentials=True, http=None, model=None,
log_request=False, log_response=False,
credentials_args=None, default_global_params=None,
additional_http_headers=None, response_encoding=None):
"""Create a new aiplatform handle."""
url = url or self.BASE_URL
super(AiplatformV1beta1, self).__init__(
url, credentials=credentials,
get_credentials=get_credentials, http=http, model=model,
log_request=log_request, log_response=log_response,
credentials_args=credentials_args,
default_global_params=default_global_params,
additional_http_headers=additional_http_headers,
response_encoding=response_encoding)
self.projects_locations_batchPredictionJobs = self.ProjectsLocationsBatchPredictionJobsService(self)
self.projects_locations_customJobs_operations = self.ProjectsLocationsCustomJobsOperationsService(self)
self.projects_locations_customJobs = self.ProjectsLocationsCustomJobsService(self)
self.projects_locations_dataLabelingJobs_operations = self.ProjectsLocationsDataLabelingJobsOperationsService(self)
self.projects_locations_dataLabelingJobs = self.ProjectsLocationsDataLabelingJobsService(self)
self.projects_locations_datasets_annotationSpecs_operations = self.ProjectsLocationsDatasetsAnnotationSpecsOperationsService(self)
self.projects_locations_datasets_annotationSpecs = self.ProjectsLocationsDatasetsAnnotationSpecsService(self)
self.projects_locations_datasets_dataItems_annotations_operations = self.ProjectsLocationsDatasetsDataItemsAnnotationsOperationsService(self)
self.projects_locations_datasets_dataItems_annotations = self.ProjectsLocationsDatasetsDataItemsAnnotationsService(self)
self.projects_locations_datasets_dataItems_operations = self.ProjectsLocationsDatasetsDataItemsOperationsService(self)
self.projects_locations_datasets_dataItems = self.ProjectsLocationsDatasetsDataItemsService(self)
self.projects_locations_datasets_operations = self.ProjectsLocationsDatasetsOperationsService(self)
self.projects_locations_datasets_savedQueries_operations = self.ProjectsLocationsDatasetsSavedQueriesOperationsService(self)
self.projects_locations_datasets_savedQueries = self.ProjectsLocationsDatasetsSavedQueriesService(self)
self.projects_locations_datasets = self.ProjectsLocationsDatasetsService(self)
self.projects_locations_edgeDevices_operations = self.ProjectsLocationsEdgeDevicesOperationsService(self)
self.projects_locations_edgeDevices = self.ProjectsLocationsEdgeDevicesService(self)
self.projects_locations_endpoints_operations = self.ProjectsLocationsEndpointsOperationsService(self)
self.projects_locations_endpoints = self.ProjectsLocationsEndpointsService(self)
self.projects_locations_featurestores_entityTypes_features_operations = self.ProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsService(self)
self.projects_locations_featurestores_entityTypes_features = self.ProjectsLocationsFeaturestoresEntityTypesFeaturesService(self)
self.projects_locations_featurestores_entityTypes_operations = self.ProjectsLocationsFeaturestoresEntityTypesOperationsService(self)
self.projects_locations_featurestores_entityTypes = self.ProjectsLocationsFeaturestoresEntityTypesService(self)
self.projects_locations_featurestores_operations = self.ProjectsLocationsFeaturestoresOperationsService(self)
self.projects_locations_featurestores = self.ProjectsLocationsFeaturestoresService(self)
self.projects_locations_hyperparameterTuningJobs_operations = self.ProjectsLocationsHyperparameterTuningJobsOperationsService(self)
self.projects_locations_hyperparameterTuningJobs = self.ProjectsLocationsHyperparameterTuningJobsService(self)
self.projects_locations_indexEndpoints_operations = self.ProjectsLocationsIndexEndpointsOperationsService(self)
self.projects_locations_indexEndpoints = self.ProjectsLocationsIndexEndpointsService(self)
self.projects_locations_indexes_operations = self.ProjectsLocationsIndexesOperationsService(self)
self.projects_locations_indexes = self.ProjectsLocationsIndexesService(self)
self.projects_locations_metadataStores_artifacts = self.ProjectsLocationsMetadataStoresArtifactsService(self)
self.projects_locations_metadataStores_contexts = self.ProjectsLocationsMetadataStoresContextsService(self)
self.projects_locations_metadataStores_executions = self.ProjectsLocationsMetadataStoresExecutionsService(self)
self.projects_locations_metadataStores_metadataSchemas = self.ProjectsLocationsMetadataStoresMetadataSchemasService(self)
self.projects_locations_metadataStores = self.ProjectsLocationsMetadataStoresService(self)
self.projects_locations_migratableResources_operations = self.ProjectsLocationsMigratableResourcesOperationsService(self)
self.projects_locations_migratableResources = self.ProjectsLocationsMigratableResourcesService(self)
self.projects_locations_modelDeploymentMonitoringJobs_operations = self.ProjectsLocationsModelDeploymentMonitoringJobsOperationsService(self)
self.projects_locations_modelDeploymentMonitoringJobs = self.ProjectsLocationsModelDeploymentMonitoringJobsService(self)
self.projects_locations_models_evaluations_operations = self.ProjectsLocationsModelsEvaluationsOperationsService(self)
self.projects_locations_models_evaluations_slices = self.ProjectsLocationsModelsEvaluationsSlicesService(self)
self.projects_locations_models_evaluations = self.ProjectsLocationsModelsEvaluationsService(self)
self.projects_locations_models_operations = self.ProjectsLocationsModelsOperationsService(self)
self.projects_locations_models = self.ProjectsLocationsModelsService(self)
self.projects_locations_operations = self.ProjectsLocationsOperationsService(self)
self.projects_locations_pipelineJobs_operations = self.ProjectsLocationsPipelineJobsOperationsService(self)
self.projects_locations_pipelineJobs = self.ProjectsLocationsPipelineJobsService(self)
self.projects_locations_specialistPools_operations = self.ProjectsLocationsSpecialistPoolsOperationsService(self)
self.projects_locations_specialistPools = self.ProjectsLocationsSpecialistPoolsService(self)
self.projects_locations_studies_operations = self.ProjectsLocationsStudiesOperationsService(self)
self.projects_locations_studies_trials_operations = self.ProjectsLocationsStudiesTrialsOperationsService(self)
self.projects_locations_studies_trials = self.ProjectsLocationsStudiesTrialsService(self)
self.projects_locations_studies = self.ProjectsLocationsStudiesService(self)
self.projects_locations_tensorboards_experiments_operations = self.ProjectsLocationsTensorboardsExperimentsOperationsService(self)
self.projects_locations_tensorboards_experiments_runs_operations = self.ProjectsLocationsTensorboardsExperimentsRunsOperationsService(self)
self.projects_locations_tensorboards_experiments_runs_timeSeries_operations = self.ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsService(self)
self.projects_locations_tensorboards_experiments_runs_timeSeries = self.ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesService(self)
self.projects_locations_tensorboards_experiments_runs = self.ProjectsLocationsTensorboardsExperimentsRunsService(self)
self.projects_locations_tensorboards_experiments = self.ProjectsLocationsTensorboardsExperimentsService(self)
self.projects_locations_tensorboards_operations = self.ProjectsLocationsTensorboardsOperationsService(self)
self.projects_locations_tensorboards = self.ProjectsLocationsTensorboardsService(self)
self.projects_locations_trainingPipelines_operations = self.ProjectsLocationsTrainingPipelinesOperationsService(self)
self.projects_locations_trainingPipelines = self.ProjectsLocationsTrainingPipelinesService(self)
self.projects_locations = self.ProjectsLocationsService(self)
self.projects = self.ProjectsService(self)
class ProjectsLocationsBatchPredictionJobsService(base_api.BaseApiService):
"""Service class for the projects_locations_batchPredictionJobs resource."""
_NAME = 'projects_locations_batchPredictionJobs'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsBatchPredictionJobsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Cancels a BatchPredictionJob. Starts asynchronous cancellation on the BatchPredictionJob. The server makes the best effort to cancel the job, but success is not guaranteed. Clients can use JobService.GetBatchPredictionJob or other methods to check whether the cancellation succeeded or whether the job completed despite cancellation. On a successful cancellation, the BatchPredictionJob is not deleted;instead its BatchPredictionJob.state is set to `CANCELLED`. Any files already outputted by the job are not deleted.
Args:
request: (AiplatformProjectsLocationsBatchPredictionJobsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/batchPredictionJobs/{batchPredictionJobsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.batchPredictionJobs.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='googleCloudAiplatformV1beta1CancelBatchPredictionJobRequest',
request_type_name='AiplatformProjectsLocationsBatchPredictionJobsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Create(self, request, global_params=None):
r"""Creates a BatchPredictionJob. A BatchPredictionJob once created will right away be attempted to start.
Args:
request: (AiplatformProjectsLocationsBatchPredictionJobsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1BatchPredictionJob) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/batchPredictionJobs',
http_method='POST',
method_id='aiplatform.projects.locations.batchPredictionJobs.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/batchPredictionJobs',
request_field='googleCloudAiplatformV1beta1BatchPredictionJob',
request_type_name='AiplatformProjectsLocationsBatchPredictionJobsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1BatchPredictionJob',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a BatchPredictionJob. Can only be called on jobs that already finished.
Args:
request: (AiplatformProjectsLocationsBatchPredictionJobsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/batchPredictionJobs/{batchPredictionJobsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.batchPredictionJobs.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsBatchPredictionJobsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a BatchPredictionJob.
Args:
request: (AiplatformProjectsLocationsBatchPredictionJobsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1BatchPredictionJob) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/batchPredictionJobs/{batchPredictionJobsId}',
http_method='GET',
method_id='aiplatform.projects.locations.batchPredictionJobs.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsBatchPredictionJobsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1BatchPredictionJob',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists BatchPredictionJobs in a Location.
Args:
request: (AiplatformProjectsLocationsBatchPredictionJobsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListBatchPredictionJobsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/batchPredictionJobs',
http_method='GET',
method_id='aiplatform.projects.locations.batchPredictionJobs.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/batchPredictionJobs',
request_field='',
request_type_name='AiplatformProjectsLocationsBatchPredictionJobsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListBatchPredictionJobsResponse',
supports_download=False,
)
class ProjectsLocationsCustomJobsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_customJobs_operations resource."""
_NAME = 'projects_locations_customJobs_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsCustomJobsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsCustomJobsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/customJobs/{customJobsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.customJobs.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsCustomJobsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsCustomJobsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/customJobs/{customJobsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.customJobs.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsCustomJobsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsCustomJobsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/customJobs/{customJobsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.customJobs.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsCustomJobsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsCustomJobsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/customJobs/{customJobsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.customJobs.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsCustomJobsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsCustomJobsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/customJobs/{customJobsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.customJobs.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsCustomJobsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
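# Illustrative sketch (not part of the generated client): polling a custom-job
# long-running operation with Wait, falling back to Get. The service attribute
# names and request/response types below come from the method configs above;
# the no-argument client construction, the MESSAGES_MODULE convention, and the
# '300s' Duration string format for the timeout query param are assumptions.
#
#   client = AiplatformV1beta1()  # assumed default-credential construction
#   messages = client.MESSAGES_MODULE  # apitools clients conventionally expose this
#   op = client.projects_locations_customJobs_operations.Wait(
#       messages.AiplatformProjectsLocationsCustomJobsOperationsWaitRequest(
#           name='projects/p/locations/us-central1/customJobs/123/operations/456',
#           timeout='300s',  # assumed string form of the Duration query param
#       ))
#   if not op.done:  # Wait is best-effort; poll with Get if it returned early
#       op = client.projects_locations_customJobs_operations.Get(
#           messages.AiplatformProjectsLocationsCustomJobsOperationsGetRequest(
#               name=op.name))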
class ProjectsLocationsCustomJobsService(base_api.BaseApiService):
"""Service class for the projects_locations_customJobs resource."""
_NAME = 'projects_locations_customJobs'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsCustomJobsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Cancels a CustomJob. Starts asynchronous cancellation on the CustomJob. The server makes a best effort to cancel the job, but success is not guaranteed. Clients can use JobService.GetCustomJob or other methods to check whether the cancellation succeeded or whether the job completed despite cancellation. On successful cancellation, the CustomJob is not deleted; instead it becomes a job with a CustomJob.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`, and CustomJob.state is set to `CANCELLED`.
Args:
request: (AiplatformProjectsLocationsCustomJobsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/customJobs/{customJobsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.customJobs.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='googleCloudAiplatformV1beta1CancelCustomJobRequest',
request_type_name='AiplatformProjectsLocationsCustomJobsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Create(self, request, global_params=None):
r"""Creates a CustomJob. A created CustomJob right away will be attempted to be run.
Args:
request: (AiplatformProjectsLocationsCustomJobsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1CustomJob) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/customJobs',
http_method='POST',
method_id='aiplatform.projects.locations.customJobs.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/customJobs',
request_field='googleCloudAiplatformV1beta1CustomJob',
request_type_name='AiplatformProjectsLocationsCustomJobsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1CustomJob',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a CustomJob.
Args:
request: (AiplatformProjectsLocationsCustomJobsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/customJobs/{customJobsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.customJobs.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsCustomJobsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a CustomJob.
Args:
request: (AiplatformProjectsLocationsCustomJobsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1CustomJob) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/customJobs/{customJobsId}',
http_method='GET',
method_id='aiplatform.projects.locations.customJobs.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsCustomJobsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1CustomJob',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists CustomJobs in a Location.
Args:
request: (AiplatformProjectsLocationsCustomJobsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListCustomJobsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/customJobs',
http_method='GET',
method_id='aiplatform.projects.locations.customJobs.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/customJobs',
request_field='',
request_type_name='AiplatformProjectsLocationsCustomJobsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListCustomJobsResponse',
supports_download=False,
)
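# Illustrative sketch (not part of the generated client): creating and then
# cancelling a CustomJob through this service. The request types and the
# 'googleCloudAiplatformV1beta1CustomJob' request field are taken from the
# method configs above; the client construction and the CustomJob payload
# fields shown are assumptions for illustration only.
#
#   client = AiplatformV1beta1()  # assumed construction
#   messages = client.MESSAGES_MODULE
#   created = client.projects_locations_customJobs.Create(
#       messages.AiplatformProjectsLocationsCustomJobsCreateRequest(
#           parent='projects/my-project/locations/us-central1',
#           googleCloudAiplatformV1beta1CustomJob=(
#               messages.GoogleCloudAiplatformV1beta1CustomJob(
#                   displayName='example-job',  # assumed field name
#               ))))
#   client.projects_locations_customJobs.Cancel(
#       messages.AiplatformProjectsLocationsCustomJobsCancelRequest(
#           name=created.name))  # 'name' on the returned CustomJob is assumed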
class ProjectsLocationsDataLabelingJobsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_dataLabelingJobs_operations resource."""
_NAME = 'projects_locations_dataLabelingJobs_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDataLabelingJobsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsDataLabelingJobsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/dataLabelingJobs/{dataLabelingJobsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.dataLabelingJobs.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsDataLabelingJobsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsDataLabelingJobsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/dataLabelingJobs/{dataLabelingJobsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.dataLabelingJobs.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDataLabelingJobsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsDataLabelingJobsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/dataLabelingJobs/{dataLabelingJobsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.dataLabelingJobs.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDataLabelingJobsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsDataLabelingJobsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/dataLabelingJobs/{dataLabelingJobsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.dataLabelingJobs.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsDataLabelingJobsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsDataLabelingJobsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/dataLabelingJobs/{dataLabelingJobsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.dataLabelingJobs.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsDataLabelingJobsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsDataLabelingJobsService(base_api.BaseApiService):
"""Service class for the projects_locations_dataLabelingJobs resource."""
_NAME = 'projects_locations_dataLabelingJobs'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDataLabelingJobsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Cancels a DataLabelingJob. Success of cancellation is not guaranteed.
Args:
request: (AiplatformProjectsLocationsDataLabelingJobsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/dataLabelingJobs/{dataLabelingJobsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.dataLabelingJobs.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='googleCloudAiplatformV1beta1CancelDataLabelingJobRequest',
request_type_name='AiplatformProjectsLocationsDataLabelingJobsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Create(self, request, global_params=None):
r"""Creates a DataLabelingJob.
Args:
request: (AiplatformProjectsLocationsDataLabelingJobsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1DataLabelingJob) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/dataLabelingJobs',
http_method='POST',
method_id='aiplatform.projects.locations.dataLabelingJobs.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/dataLabelingJobs',
request_field='googleCloudAiplatformV1beta1DataLabelingJob',
request_type_name='AiplatformProjectsLocationsDataLabelingJobsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1DataLabelingJob',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a DataLabelingJob.
Args:
request: (AiplatformProjectsLocationsDataLabelingJobsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/dataLabelingJobs/{dataLabelingJobsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.dataLabelingJobs.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDataLabelingJobsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a DataLabelingJob.
Args:
request: (AiplatformProjectsLocationsDataLabelingJobsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1DataLabelingJob) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/dataLabelingJobs/{dataLabelingJobsId}',
http_method='GET',
method_id='aiplatform.projects.locations.dataLabelingJobs.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDataLabelingJobsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1DataLabelingJob',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists DataLabelingJobs in a Location.
Args:
request: (AiplatformProjectsLocationsDataLabelingJobsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListDataLabelingJobsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/dataLabelingJobs',
http_method='GET',
method_id='aiplatform.projects.locations.dataLabelingJobs.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/dataLabelingJobs',
request_field='',
request_type_name='AiplatformProjectsLocationsDataLabelingJobsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListDataLabelingJobsResponse',
supports_download=False,
)
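# Illustrative sketch (not part of the generated client): paging through
# DataLabelingJobs with the pageSize/pageToken query params declared above.
# The client construction is assumed, and the 'dataLabelingJobs' and
# 'nextPageToken' response field names follow the usual List-response
# convention rather than anything confirmed by this file.
#
#   client = AiplatformV1beta1()  # assumed construction
#   messages = client.MESSAGES_MODULE
#   token = None
#   while True:
#       resp = client.projects_locations_dataLabelingJobs.List(
#           messages.AiplatformProjectsLocationsDataLabelingJobsListRequest(
#               parent='projects/my-project/locations/us-central1',
#               pageSize=100, pageToken=token))
#       for job in resp.dataLabelingJobs or []:  # assumed response field
#           print(job.name)
#       token = resp.nextPageToken  # assumed response field
#       if not token:
#           break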
class ProjectsLocationsDatasetsAnnotationSpecsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_datasets_annotationSpecs_operations resource."""
_NAME = 'projects_locations_datasets_annotationSpecs_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDatasetsAnnotationSpecsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsDatasetsAnnotationSpecsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/annotationSpecs/{annotationSpecsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.annotationSpecs.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsAnnotationSpecsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsDatasetsAnnotationSpecsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/annotationSpecs/{annotationSpecsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.datasets.annotationSpecs.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsAnnotationSpecsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsDatasetsAnnotationSpecsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/annotationSpecs/{annotationSpecsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.annotationSpecs.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsAnnotationSpecsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsDatasetsAnnotationSpecsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/annotationSpecs/{annotationSpecsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.annotationSpecs.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsAnnotationSpecsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsDatasetsAnnotationSpecsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/annotationSpecs/{annotationSpecsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.annotationSpecs.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsAnnotationSpecsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsDatasetsAnnotationSpecsService(base_api.BaseApiService):
"""Service class for the projects_locations_datasets_annotationSpecs resource."""
_NAME = 'projects_locations_datasets_annotationSpecs'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDatasetsAnnotationSpecsService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
r"""Gets an AnnotationSpec.
Args:
request: (AiplatformProjectsLocationsDatasetsAnnotationSpecsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1AnnotationSpec) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/annotationSpecs/{annotationSpecsId}',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.annotationSpecs.get',
ordered_params=['name'],
path_params=['name'],
query_params=['readMask'],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsAnnotationSpecsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1AnnotationSpec',
supports_download=False,
)
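# Illustrative sketch (not part of the generated client): fetching an
# AnnotationSpec with a partial read. 'readMask' is declared as a query param
# above; passing it as a comma-separated FieldMask string, and the client
# construction, are assumptions here.
#
#   client = AiplatformV1beta1()  # assumed construction
#   messages = client.MESSAGES_MODULE
#   spec = client.projects_locations_datasets_annotationSpecs.Get(
#       messages.AiplatformProjectsLocationsDatasetsAnnotationSpecsGetRequest(
#           name=('projects/my-project/locations/us-central1/'
#                 'datasets/111/annotationSpecs/222'),
#           readMask='displayName'))  # assumed FieldMask string form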
class ProjectsLocationsDatasetsDataItemsAnnotationsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_datasets_dataItems_annotations_operations resource."""
_NAME = 'projects_locations_datasets_dataItems_annotations_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDatasetsDataItemsAnnotationsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsAnnotationsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/annotations/{annotationsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.dataItems.annotations.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsAnnotationsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsAnnotationsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/annotations/{annotationsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.datasets.dataItems.annotations.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsAnnotationsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsAnnotationsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/annotations/{annotationsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.dataItems.annotations.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsAnnotationsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsAnnotationsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/annotations/{annotationsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.dataItems.annotations.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsAnnotationsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsAnnotationsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/annotations/{annotationsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.dataItems.annotations.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsAnnotationsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsDatasetsDataItemsAnnotationsService(base_api.BaseApiService):
"""Service class for the projects_locations_datasets_dataItems_annotations resource."""
_NAME = 'projects_locations_datasets_dataItems_annotations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDatasetsDataItemsAnnotationsService, self).__init__(client)
self._upload_configs = {
}
def List(self, request, global_params=None):
r"""Lists Annotations belongs to a dataitem.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsAnnotationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListAnnotationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/annotations',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.dataItems.annotations.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/annotations',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsAnnotationsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListAnnotationsResponse',
supports_download=False,
)
class ProjectsLocationsDatasetsDataItemsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_datasets_dataItems_operations resource."""
_NAME = 'projects_locations_datasets_dataItems_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDatasetsDataItemsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.dataItems.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.datasets.dataItems.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.dataItems.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.dataItems.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems/{dataItemsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.dataItems.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsDatasetsDataItemsService(base_api.BaseApiService):
"""Service class for the projects_locations_datasets_dataItems resource."""
_NAME = 'projects_locations_datasets_dataItems'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDatasetsDataItemsService, self).__init__(client)
self._upload_configs = {
}
def List(self, request, global_params=None):
r"""Lists DataItems in a Dataset.
Args:
request: (AiplatformProjectsLocationsDatasetsDataItemsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListDataItemsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/dataItems',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.dataItems.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/dataItems',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDataItemsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListDataItemsResponse',
supports_download=False,
)
class ProjectsLocationsDatasetsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_datasets_operations resource."""
_NAME = 'projects_locations_datasets_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDatasetsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsDatasetsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsDatasetsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.datasets.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsDatasetsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsDatasetsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsDatasetsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
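# Illustrative sketch (not part of the generated client): cancelling a dataset
# operation and then deleting its record once the result is no longer needed.
# Both request types come from the method configs above; the client
# construction and the operation name are placeholders.
#
#   client = AiplatformV1beta1()  # assumed construction
#   messages = client.MESSAGES_MODULE
#   op_name = ('projects/my-project/locations/us-central1/'
#              'datasets/111/operations/222')
#   client.projects_locations_datasets_operations.Cancel(
#       messages.AiplatformProjectsLocationsDatasetsOperationsCancelRequest(
#           name=op_name))
#   client.projects_locations_datasets_operations.Delete(
#       messages.AiplatformProjectsLocationsDatasetsOperationsDeleteRequest(
#           name=op_name))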
class ProjectsLocationsDatasetsSavedQueriesOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_datasets_savedQueries_operations resource."""
_NAME = 'projects_locations_datasets_savedQueries_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDatasetsSavedQueriesOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsDatasetsSavedQueriesOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/savedQueries/{savedQueriesId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.savedQueries.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsSavedQueriesOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsDatasetsSavedQueriesOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/savedQueries/{savedQueriesId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.datasets.savedQueries.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsSavedQueriesOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsDatasetsSavedQueriesOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/savedQueries/{savedQueriesId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.savedQueries.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsSavedQueriesOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
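# Usage sketch: a simple polling loop over Get (hypothetical names; `client` and
# `operation_name` come from earlier calls):
#   import time
#   while True:
#       op = client.projects_locations_datasets_savedQueries_operations.Get(
#           messages.AiplatformProjectsLocationsDatasetsSavedQueriesOperationsGetRequest(
#               name=operation_name))
#       if op.done:
#           break
#       time.sleep(10)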
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsDatasetsSavedQueriesOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/savedQueries/{savedQueriesId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.savedQueries.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsSavedQueriesOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsDatasetsSavedQueriesOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}/savedQueries/{savedQueriesId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.savedQueries.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsSavedQueriesOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
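# Usage sketch: Wait blocks server-side until the operation finishes or the
# timeout elapses. `timeout` is a query parameter; in these generated messages a
# Duration is passed as a string such as '300s' (an assumption worth checking
# against the messages module):
#   op = client.projects_locations_datasets_savedQueries_operations.Wait(
#       messages.AiplatformProjectsLocationsDatasetsSavedQueriesOperationsWaitRequest(
#           name=operation_name, timeout='300s'))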
class ProjectsLocationsDatasetsSavedQueriesService(base_api.BaseApiService):
"""Service class for the projects_locations_datasets_savedQueries resource."""
_NAME = 'projects_locations_datasets_savedQueries'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDatasetsSavedQueriesService, self).__init__(client)
self._upload_configs = {}
class ProjectsLocationsDatasetsService(base_api.BaseApiService):
"""Service class for the projects_locations_datasets resource."""
_NAME = 'projects_locations_datasets'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsDatasetsService, self).__init__(client)
self._upload_configs = {}
def Create(self, request, global_params=None):
r"""Creates a Dataset.
Args:
request: (AiplatformProjectsLocationsDatasetsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/datasets',
request_field='googleCloudAiplatformV1beta1Dataset',
request_type_name='AiplatformProjectsLocationsDatasetsCreateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
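# Usage sketch (hypothetical; the metadata schema URI shown is one of the
# documented dataset schemas and is only illustrative). The Dataset travels in
# the request_field declared above:
#   dataset = messages.GoogleCloudAiplatformV1beta1Dataset(
#       displayName='my-dataset',
#       metadataSchemaUri='gs://google-cloud-aiplatform/schema/dataset/metadata/image_1.0.0.yaml')
#   op = client.projects_locations_datasets.Create(
#       messages.AiplatformProjectsLocationsDatasetsCreateRequest(
#           parent='projects/my-project/locations/us-central1',
#           googleCloudAiplatformV1beta1Dataset=dataset))
# Create returns a GoogleLongrunningOperation; poll it until done before reading
# the resulting Dataset out of op.response.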
def Delete(self, request, global_params=None):
r"""Deletes a Dataset.
Args:
request: (AiplatformProjectsLocationsDatasetsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.datasets.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Export(self, request, global_params=None):
r"""Exports data from a Dataset.
Args:
request: (AiplatformProjectsLocationsDatasetsExportRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Export')
return self._RunMethod(
config, request, global_params=global_params)
Export.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}:export',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.export',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:export',
request_field='googleCloudAiplatformV1beta1ExportDataRequest',
request_type_name='AiplatformProjectsLocationsDatasetsExportRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a Dataset.
Args:
request: (AiplatformProjectsLocationsDatasetsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Dataset) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.get',
ordered_params=['name'],
path_params=['name'],
query_params=['readMask'],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Dataset',
supports_download=False,
)
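# Usage sketch: readMask is a query parameter restricting which Dataset fields
# are returned (field names here are illustrative):
#   ds = client.projects_locations_datasets.Get(
#       messages.AiplatformProjectsLocationsDatasetsGetRequest(
#           name='projects/my-project/locations/us-central1/datasets/123',
#           readMask='displayName,createTime'))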
def Import(self, request, global_params=None):
r"""Imports data into a Dataset.
Args:
request: (AiplatformProjectsLocationsDatasetsImportRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Import')
return self._RunMethod(
config, request, global_params=global_params)
Import.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}:import',
http_method='POST',
method_id='aiplatform.projects.locations.datasets.import',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:import',
request_field='googleCloudAiplatformV1beta1ImportDataRequest',
request_type_name='AiplatformProjectsLocationsDatasetsImportRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
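# Usage sketch (hypothetical URIs; the import schema URI shown is one of the
# documented ioformat schemas). Import runs as a long-running operation:
#   import_request = messages.GoogleCloudAiplatformV1beta1ImportDataRequest(
#       importConfigs=[messages.GoogleCloudAiplatformV1beta1ImportDataConfig(
#           gcsSource=messages.GoogleCloudAiplatformV1beta1GcsSource(
#               uris=['gs://my-bucket/data.jsonl']),
#           importSchemaUri='gs://google-cloud-aiplatform/schema/dataset/ioformat/'
#                           'image_classification_single_label_io_format_1.0.0.yaml')])
#   op = client.projects_locations_datasets.Import(
#       messages.AiplatformProjectsLocationsDatasetsImportRequest(
#           name=dataset_name,
#           googleCloudAiplatformV1beta1ImportDataRequest=import_request))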
def List(self, request, global_params=None):
r"""Lists Datasets in a Location.
Args:
request: (AiplatformProjectsLocationsDatasetsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListDatasetsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets',
http_method='GET',
method_id='aiplatform.projects.locations.datasets.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/datasets',
request_field='',
request_type_name='AiplatformProjectsLocationsDatasetsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListDatasetsResponse',
supports_download=False,
)
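# Usage sketch: the standard page-token loop over List, using the pageSize and
# pageToken query parameters declared above (hypothetical client variable):
#   request = messages.AiplatformProjectsLocationsDatasetsListRequest(
#       parent='projects/my-project/locations/us-central1', pageSize=100)
#   while True:
#       response = client.projects_locations_datasets.List(request)
#       for dataset in response.datasets:
#           ...  # each item is a GoogleCloudAiplatformV1beta1Dataset
#       if not response.nextPageToken:
#           break
#       request.pageToken = response.nextPageToken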
def Patch(self, request, global_params=None):
r"""Updates a Dataset.
Args:
request: (AiplatformProjectsLocationsDatasetsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Dataset) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/datasets/{datasetsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.datasets.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1Dataset',
request_type_name='AiplatformProjectsLocationsDatasetsPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1Dataset',
supports_download=False,
)
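# Usage sketch: Patch carries the Dataset in the request body plus an updateMask
# query parameter naming the fields to overwrite:
#   updated = client.projects_locations_datasets.Patch(
#       messages.AiplatformProjectsLocationsDatasetsPatchRequest(
#           name='projects/my-project/locations/us-central1/datasets/123',
#           googleCloudAiplatformV1beta1Dataset=messages.GoogleCloudAiplatformV1beta1Dataset(
#               displayName='renamed-dataset'),
#           updateMask='displayName'))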
class ProjectsLocationsEdgeDevicesOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_edgeDevices_operations resource."""
_NAME = 'projects_locations_edgeDevices_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsEdgeDevicesOperationsService, self).__init__(client)
self._upload_configs = {}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsEdgeDevicesOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/edgeDevices/{edgeDevicesId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.edgeDevices.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsEdgeDevicesOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsEdgeDevicesOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/edgeDevices/{edgeDevicesId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.edgeDevices.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsEdgeDevicesOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsEdgeDevicesOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/edgeDevices/{edgeDevicesId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.edgeDevices.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsEdgeDevicesOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsEdgeDevicesOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/edgeDevices/{edgeDevicesId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.edgeDevices.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsEdgeDevicesOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsEdgeDevicesOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/edgeDevices/{edgeDevicesId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.edgeDevices.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsEdgeDevicesOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsEdgeDevicesService(base_api.BaseApiService):
"""Service class for the projects_locations_edgeDevices resource."""
_NAME = 'projects_locations_edgeDevices'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsEdgeDevicesService, self).__init__(client)
self._upload_configs = {}
class ProjectsLocationsEndpointsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_endpoints_operations resource."""
_NAME = 'projects_locations_endpoints_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsEndpointsOperationsService, self).__init__(client)
self._upload_configs = {}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsEndpointsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.endpoints.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsEndpointsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsEndpointsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.endpoints.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsEndpointsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsEndpointsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.endpoints.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsEndpointsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsEndpointsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.endpoints.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsEndpointsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsEndpointsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.endpoints.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsEndpointsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsEndpointsService(base_api.BaseApiService):
"""Service class for the projects_locations_endpoints resource."""
_NAME = 'projects_locations_endpoints'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsEndpointsService, self).__init__(client)
self._upload_configs = {}
def Create(self, request, global_params=None):
r"""Creates an Endpoint.
Args:
request: (AiplatformProjectsLocationsEndpointsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints',
http_method='POST',
method_id='aiplatform.projects.locations.endpoints.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['endpointId'],
relative_path='v1beta1/{+parent}/endpoints',
request_field='googleCloudAiplatformV1beta1Endpoint',
request_type_name='AiplatformProjectsLocationsEndpointsCreateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
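# Usage sketch: endpointId is an optional query parameter that fixes the final
# path component of the new Endpoint's resource name (names are illustrative):
#   op = client.projects_locations_endpoints.Create(
#       messages.AiplatformProjectsLocationsEndpointsCreateRequest(
#           parent='projects/my-project/locations/us-central1',
#           endpointId='my-endpoint',
#           googleCloudAiplatformV1beta1Endpoint=messages.GoogleCloudAiplatformV1beta1Endpoint(
#               displayName='my-endpoint')))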
def Delete(self, request, global_params=None):
r"""Deletes an Endpoint.
Args:
request: (AiplatformProjectsLocationsEndpointsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.endpoints.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsEndpointsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def DeployModel(self, request, global_params=None):
r"""Deploys a Model into this Endpoint, creating a DeployedModel within it.
Args:
request: (AiplatformProjectsLocationsEndpointsDeployModelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('DeployModel')
return self._RunMethod(
config, request, global_params=global_params)
DeployModel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}:deployModel',
http_method='POST',
method_id='aiplatform.projects.locations.endpoints.deployModel',
ordered_params=['endpoint'],
path_params=['endpoint'],
query_params=[],
relative_path='v1beta1/{+endpoint}:deployModel',
request_field='googleCloudAiplatformV1beta1DeployModelRequest',
request_type_name='AiplatformProjectsLocationsEndpointsDeployModelRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
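# Usage sketch: note that DeployModel is keyed on `endpoint` rather than `name`,
# and the DeployedModel spec travels in the request body (message field names
# are assumed from the generated messages module):
#   deploy = messages.GoogleCloudAiplatformV1beta1DeployModelRequest(
#       deployedModel=messages.GoogleCloudAiplatformV1beta1DeployedModel(
#           model='projects/my-project/locations/us-central1/models/789'))
#   op = client.projects_locations_endpoints.DeployModel(
#       messages.AiplatformProjectsLocationsEndpointsDeployModelRequest(
#           endpoint='projects/my-project/locations/us-central1/endpoints/456',
#           googleCloudAiplatformV1beta1DeployModelRequest=deploy))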
def Explain(self, request, global_params=None):
r"""Perform an online explanation. If deployed_model_id is specified, the corresponding DeployModel must have explanation_spec populated. If deployed_model_id is not specified, all DeployedModels must have explanation_spec populated. Only deployed AutoML tabular Models have explanation_spec.
Args:
request: (AiplatformProjectsLocationsEndpointsExplainRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ExplainResponse) The response message.
"""
config = self.GetMethodConfig('Explain')
return self._RunMethod(
config, request, global_params=global_params)
Explain.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}:explain',
http_method='POST',
method_id='aiplatform.projects.locations.endpoints.explain',
ordered_params=['endpoint'],
path_params=['endpoint'],
query_params=[],
relative_path='v1beta1/{+endpoint}:explain',
request_field='googleCloudAiplatformV1beta1ExplainRequest',
request_type_name='AiplatformProjectsLocationsEndpointsExplainRequest',
response_type_name='GoogleCloudAiplatformV1beta1ExplainResponse',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets an Endpoint.
Args:
request: (AiplatformProjectsLocationsEndpointsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Endpoint) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}',
http_method='GET',
method_id='aiplatform.projects.locations.endpoints.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsEndpointsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Endpoint',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists Endpoints in a Location.
Args:
request: (AiplatformProjectsLocationsEndpointsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListEndpointsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints',
http_method='GET',
method_id='aiplatform.projects.locations.endpoints.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/endpoints',
request_field='',
request_type_name='AiplatformProjectsLocationsEndpointsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListEndpointsResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates an Endpoint.
Args:
request: (AiplatformProjectsLocationsEndpointsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Endpoint) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.endpoints.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1Endpoint',
request_type_name='AiplatformProjectsLocationsEndpointsPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1Endpoint',
supports_download=False,
)
def Predict(self, request, global_params=None):
r"""Perform an online prediction.
Args:
request: (AiplatformProjectsLocationsEndpointsPredictRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1PredictResponse) The response message.
"""
config = self.GetMethodConfig('Predict')
return self._RunMethod(
config, request, global_params=global_params)
Predict.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}:predict',
http_method='POST',
method_id='aiplatform.projects.locations.endpoints.predict',
ordered_params=['endpoint'],
path_params=['endpoint'],
query_params=[],
relative_path='v1beta1/{+endpoint}:predict',
request_field='googleCloudAiplatformV1beta1PredictRequest',
request_type_name='AiplatformProjectsLocationsEndpointsPredictRequest',
response_type_name='GoogleCloudAiplatformV1beta1PredictResponse',
supports_download=False,
)
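# Usage sketch: instances are model-specific JSON-like values. With apitools
# they are typically built via the encoding/extra_types helpers; whether this
# package wires them up the same way is an assumption:
#   from apitools.base.py import encoding, extra_types
#   instance = encoding.PyValueToMessage(extra_types.JsonValue, {'feature': 1.0})
#   response = client.projects_locations_endpoints.Predict(
#       messages.AiplatformProjectsLocationsEndpointsPredictRequest(
#           endpoint=endpoint_name,
#           googleCloudAiplatformV1beta1PredictRequest=
#               messages.GoogleCloudAiplatformV1beta1PredictRequest(
#                   instances=[instance])))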
def RawPredict(self, request, global_params=None):
r"""Perform an online prediction with an arbitrary HTTP payload. The response includes the following HTTP headers: * `X-Vertex-AI-Endpoint-Id`: ID of the Endpoint that served this prediction. * `X-Vertex-AI-Deployed-Model-Id`: ID of the Endpoint's DeployedModel that served this prediction.
Args:
request: (AiplatformProjectsLocationsEndpointsRawPredictRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleApiHttpBody) The response message.
"""
config = self.GetMethodConfig('RawPredict')
return self._RunMethod(
config, request, global_params=global_params)
RawPredict.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}:rawPredict',
http_method='POST',
method_id='aiplatform.projects.locations.endpoints.rawPredict',
ordered_params=['endpoint'],
path_params=['endpoint'],
query_params=[],
relative_path='v1beta1/{+endpoint}:rawPredict',
request_field='googleCloudAiplatformV1beta1RawPredictRequest',
request_type_name='AiplatformProjectsLocationsEndpointsRawPredictRequest',
response_type_name='GoogleApiHttpBody',
supports_download=False,
)
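# Usage sketch: RawPredict wraps an arbitrary payload in a GoogleApiHttpBody;
# the response is likewise a GoogleApiHttpBody whose data field carries the raw
# bytes (payload below is hypothetical):
#   raw = messages.GoogleCloudAiplatformV1beta1RawPredictRequest(
#       httpBody=messages.GoogleApiHttpBody(
#           contentType='application/json',
#           data=b'{"instances": [{"feature": 1.0}]}'))
#   response = client.projects_locations_endpoints.RawPredict(
#       messages.AiplatformProjectsLocationsEndpointsRawPredictRequest(
#           endpoint=endpoint_name,
#           googleCloudAiplatformV1beta1RawPredictRequest=raw))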
def UndeployModel(self, request, global_params=None):
r"""Undeploys a Model from an Endpoint, removing a DeployedModel from it, and freeing all resources it's using.
Args:
request: (AiplatformProjectsLocationsEndpointsUndeployModelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('UndeployModel')
return self._RunMethod(
config, request, global_params=global_params)
UndeployModel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/endpoints/{endpointsId}:undeployModel',
http_method='POST',
method_id='aiplatform.projects.locations.endpoints.undeployModel',
ordered_params=['endpoint'],
path_params=['endpoint'],
query_params=[],
relative_path='v1beta1/{+endpoint}:undeployModel',
request_field='googleCloudAiplatformV1beta1UndeployModelRequest',
request_type_name='AiplatformProjectsLocationsEndpointsUndeployModelRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_featurestores_entityTypes_features_operations resource."""
_NAME = 'projects_locations_featurestores_entityTypes_features_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsService, self).__init__(client)
self._upload_configs = {}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features/{featuresId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.entityTypes.features.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features/{featuresId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.featurestores.entityTypes.features.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features/{featuresId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.entityTypes.features.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features/{featuresId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.entityTypes.features.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features/{featuresId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.entityTypes.features.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsFeaturestoresEntityTypesFeaturesService(base_api.BaseApiService):
"""Service class for the projects_locations_featurestores_entityTypes_features resource."""
_NAME = 'projects_locations_featurestores_entityTypes_features'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsFeaturestoresEntityTypesFeaturesService, self).__init__(client)
self._upload_configs = {}
def BatchCreate(self, request, global_params=None):
r"""Creates a batch of Features in a given EntityType.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesBatchCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('BatchCreate')
return self._RunMethod(
config, request, global_params=global_params)
BatchCreate.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features:batchCreate',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.entityTypes.features.batchCreate',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/features:batchCreate',
request_field='googleCloudAiplatformV1beta1BatchCreateFeaturesRequest',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesBatchCreateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Create(self, request, global_params=None):
r"""Creates a new Feature in a given EntityType.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.entityTypes.features.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['featureId'],
relative_path='v1beta1/{+parent}/features',
request_field='googleCloudAiplatformV1beta1Feature',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesCreateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
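# Usage sketch: featureId becomes the final path component of the new Feature.
# The enum access pattern below follows the usual apitools naming and is an
# assumption about the generated messages module:
#   feature = messages.GoogleCloudAiplatformV1beta1Feature(
#       valueType=messages.GoogleCloudAiplatformV1beta1Feature
#           .ValueTypeValueValuesEnum.DOUBLE)
#   op = client.projects_locations_featurestores_entityTypes_features.Create(
#       messages.AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesCreateRequest(
#           parent=entity_type_name,
#           featureId='average_spend',
#           googleCloudAiplatformV1beta1Feature=feature))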
def Delete(self, request, global_params=None):
r"""Deletes a single Feature.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features/{featuresId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.featurestores.entityTypes.features.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets details of a single Feature.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Feature) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features/{featuresId}',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.entityTypes.features.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Feature',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists Features in a given EntityType.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListFeaturesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.entityTypes.features.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'latestStatsCount', 'orderBy', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/features',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListFeaturesResponse',
supports_download=False,
)
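# Usage sketch: latestStatsCount asks the service to attach up to that many of
# the most recent monitoring stats to each returned Feature (names below are
# illustrative):
#   response = client.projects_locations_featurestores_entityTypes_features.List(
#       messages.AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesListRequest(
#           parent=entity_type_name,
#           latestStatsCount=1,
#           orderBy='createTime desc'))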
def Patch(self, request, global_params=None):
r"""Updates the parameters of a single Feature.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Feature) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/features/{featuresId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.featurestores.entityTypes.features.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1Feature',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesFeaturesPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1Feature',
supports_download=False,
)
class ProjectsLocationsFeaturestoresEntityTypesOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_featurestores_entityTypes_operations resource."""
_NAME = 'projects_locations_featurestores_entityTypes_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsFeaturestoresEntityTypesOperationsService, self).__init__(client)
self._upload_configs = {}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.entityTypes.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.featurestores.entityTypes.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.entityTypes.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.entityTypes.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
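# Illustrative usage (not part of the generated surface): paging through
# operations with this List method. `client` is assumed to be an
# AiplatformV1beta1 instance and `messages` the companion generated messages
# module; resource names below are placeholders.
#
#   request = messages.AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsListRequest(
#       name='projects/my-project/locations/us-central1/featurestores/my-fs/entityTypes/my-et',
#       pageSize=100)
#   while True:
#     response = client.projects_locations_featurestores_entityTypes_operations.List(request)
#     for operation in response.operations or []:
#       print(operation.name)
#     if not response.nextPageToken:
#       break
#     request.pageToken = response.nextPageToken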
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.entityTypes.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
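# Illustrative usage: blocking on an operation with Wait. The `timeout` query
# parameter maps to a duration string; '120s' is an assumed example value.
# `client` and `messages` are assumed as in the List example above.
#
#   request = messages.AiplatformProjectsLocationsFeaturestoresEntityTypesOperationsWaitRequest(
#       name='projects/my-project/locations/us-central1/featurestores/my-fs/entityTypes/my-et/operations/123',
#       timeout='120s')
#   operation = client.projects_locations_featurestores_entityTypes_operations.Wait(request)
#   if not operation.done:
#     # Per the docstring, Wait is best-effort and may return before the
#     # timeout; poll Get or Wait again if the operation is not done.
#     pass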
class ProjectsLocationsFeaturestoresEntityTypesService(base_api.BaseApiService):
"""Service class for the projects_locations_featurestores_entityTypes resource."""
_NAME = 'projects_locations_featurestores_entityTypes'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsFeaturestoresEntityTypesService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a new EntityType in a given Featurestore.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.entityTypes.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['entityTypeId'],
relative_path='v1beta1/{+parent}/entityTypes',
request_field='googleCloudAiplatformV1beta1EntityType',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesCreateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
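# Illustrative usage: creating an EntityType. Field names follow the
# method_config above (`parent`, `entityTypeId`, and the EntityType body);
# the description text is a placeholder.
#
#   entity_type = messages.GoogleCloudAiplatformV1beta1EntityType(
#       description='Customers keyed by customer ID')
#   request = messages.AiplatformProjectsLocationsFeaturestoresEntityTypesCreateRequest(
#       parent='projects/my-project/locations/us-central1/featurestores/my-fs',
#       entityTypeId='customer',
#       googleCloudAiplatformV1beta1EntityType=entity_type)
#   operation = client.projects_locations_featurestores_entityTypes.Create(request)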
def Delete(self, request, global_params=None):
r"""Deletes a single EntityType. The EntityType must not have any Features or `force` must be set to true for the request to succeed.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.featurestores.entityTypes.delete',
ordered_params=['name'],
path_params=['name'],
query_params=['force'],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def ExportFeatureValues(self, request, global_params=None):
r"""Exports Feature values from all the entities of a target EntityType.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesExportFeatureValuesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('ExportFeatureValues')
return self._RunMethod(
config, request, global_params=global_params)
ExportFeatureValues.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}:exportFeatureValues',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.entityTypes.exportFeatureValues',
ordered_params=['entityType'],
path_params=['entityType'],
query_params=[],
relative_path='v1beta1/{+entityType}:exportFeatureValues',
request_field='googleCloudAiplatformV1beta1ExportFeatureValuesRequest',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesExportFeatureValuesRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets details of a single EntityType.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1EntityType) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.entityTypes.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1EntityType',
supports_download=False,
)
def ImportFeatureValues(self, request, global_params=None):
r"""Imports Feature values into the Featurestore from a source storage. The progress of the import is tracked by the returned operation. The imported features are guaranteed to be visible to subsequent read operations after the operation is marked as successfully done. If an import operation fails, the Feature values returned from reads and exports may be inconsistent. If consistency is required, the caller must retry the same import request again and wait till the new operation returned is marked as successfully done. There are also scenarios where the caller can cause inconsistency. - Source data for import contains multiple distinct Feature values for the same entity ID and timestamp. - Source is modified during an import. This includes adding, updating, or removing source data and/or metadata. Examples of updating metadata include but are not limited to changing storage location, storage class, or retention policy. - Online serving cluster is under-provisioned.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesImportFeatureValuesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('ImportFeatureValues')
return self._RunMethod(
config, request, global_params=global_params)
ImportFeatureValues.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}:importFeatureValues',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.entityTypes.importFeatureValues',
ordered_params=['entityType'],
path_params=['entityType'],
query_params=[],
relative_path='v1beta1/{+entityType}:importFeatureValues',
request_field='googleCloudAiplatformV1beta1ImportFeatureValuesRequest',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesImportFeatureValuesRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
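# Illustrative usage: starting a feature-value import and following the
# docstring's consistency guidance. The inner ImportFeatureValuesRequest
# payload is left schematic; populate its source and feature-spec fields from
# the generated message types.
#
#   import_body = messages.GoogleCloudAiplatformV1beta1ImportFeatureValuesRequest()
#   # ... set source and feature-spec fields on import_body ...
#   request = messages.AiplatformProjectsLocationsFeaturestoresEntityTypesImportFeatureValuesRequest(
#       entityType='projects/my-project/locations/us-central1/featurestores/my-fs/entityTypes/customer',
#       googleCloudAiplatformV1beta1ImportFeatureValuesRequest=import_body)
#   operation = client.projects_locations_featurestores_entityTypes.ImportFeatureValues(request)
#   # If the operation ultimately fails, retry the same request and wait for
#   # the new operation to succeed before trusting reads or exports.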
def List(self, request, global_params=None):
r"""Lists EntityTypes in a given Featurestore.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListEntityTypesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.entityTypes.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/entityTypes',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListEntityTypesResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates the parameters of a single EntityType.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1EntityType) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.featurestores.entityTypes.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1EntityType',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1EntityType',
supports_download=False,
)
def ReadFeatureValues(self, request, global_params=None):
r"""Reads Feature values of a specific entity of an EntityType. For reading feature values of multiple entities of an EntityType, please use StreamingReadFeatureValues.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesReadFeatureValuesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ReadFeatureValuesResponse) The response message.
"""
config = self.GetMethodConfig('ReadFeatureValues')
return self._RunMethod(
config, request, global_params=global_params)
ReadFeatureValues.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}:readFeatureValues',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.entityTypes.readFeatureValues',
ordered_params=['entityType'],
path_params=['entityType'],
query_params=[],
relative_path='v1beta1/{+entityType}:readFeatureValues',
request_field='googleCloudAiplatformV1beta1ReadFeatureValuesRequest',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesReadFeatureValuesRequest',
response_type_name='GoogleCloudAiplatformV1beta1ReadFeatureValuesResponse',
supports_download=False,
)
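# Illustrative usage: reading Feature values of one entity. The inner
# ReadFeatureValuesRequest payload (entity ID plus feature selection) is left
# schematic; see the generated message types for exact field names.
#
#   read_body = messages.GoogleCloudAiplatformV1beta1ReadFeatureValuesRequest()
#   # ... set the entity ID and feature selector on read_body ...
#   request = messages.AiplatformProjectsLocationsFeaturestoresEntityTypesReadFeatureValuesRequest(
#       entityType='projects/my-project/locations/us-central1/featurestores/my-fs/entityTypes/customer',
#       googleCloudAiplatformV1beta1ReadFeatureValuesRequest=read_body)
#   response = client.projects_locations_featurestores_entityTypes.ReadFeatureValues(request)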
def StreamingReadFeatureValues(self, request, global_params=None):
r"""Reads Feature values for multiple entities. Depending on their size, data for different entities may be broken up across multiple responses.
Args:
request: (AiplatformProjectsLocationsFeaturestoresEntityTypesStreamingReadFeatureValuesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ReadFeatureValuesResponse) The response message.
"""
config = self.GetMethodConfig('StreamingReadFeatureValues')
return self._RunMethod(
config, request, global_params=global_params)
StreamingReadFeatureValues.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/entityTypes/{entityTypesId}:streamingReadFeatureValues',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.entityTypes.streamingReadFeatureValues',
ordered_params=['entityType'],
path_params=['entityType'],
query_params=[],
relative_path='v1beta1/{+entityType}:streamingReadFeatureValues',
request_field='googleCloudAiplatformV1beta1StreamingReadFeatureValuesRequest',
request_type_name='AiplatformProjectsLocationsFeaturestoresEntityTypesStreamingReadFeatureValuesRequest',
response_type_name='GoogleCloudAiplatformV1beta1ReadFeatureValuesResponse',
supports_download=False,
)
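# Illustrative usage: the streaming variant uses the same entityType binding
# with a StreamingReadFeatureValuesRequest body. Per the docstring, one
# entity's data may span multiple responses, so callers should merge partial
# results rather than assume one response per entity.
#
#   stream_body = messages.GoogleCloudAiplatformV1beta1StreamingReadFeatureValuesRequest()
#   # ... set entity IDs and feature selector on stream_body ...
#   request = messages.AiplatformProjectsLocationsFeaturestoresEntityTypesStreamingReadFeatureValuesRequest(
#       entityType='projects/my-project/locations/us-central1/featurestores/my-fs/entityTypes/customer',
#       googleCloudAiplatformV1beta1StreamingReadFeatureValuesRequest=stream_body)
#   response = client.projects_locations_featurestores_entityTypes.StreamingReadFeatureValues(request)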
class ProjectsLocationsFeaturestoresOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_featurestores_operations resource."""
_NAME = 'projects_locations_featurestores_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsFeaturestoresOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsFeaturestoresOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsFeaturestoresOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.featurestores.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsFeaturestoresOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsFeaturestoresOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsFeaturestoresOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsFeaturestoresService(base_api.BaseApiService):
"""Service class for the projects_locations_featurestores resource."""
_NAME = 'projects_locations_featurestores'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsFeaturestoresService, self).__init__(client)
self._upload_configs = {
}
def BatchReadFeatureValues(self, request, global_params=None):
r"""Batch reads Feature values from a Featurestore. This API enables batch reading Feature values, where each read instance in the batch may read Feature values of entities from one or more EntityTypes. Point-in-time correctness is guaranteed for Feature values of each read instance as of each instance's read timestamp.
Args:
request: (AiplatformProjectsLocationsFeaturestoresBatchReadFeatureValuesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('BatchReadFeatureValues')
return self._RunMethod(
config, request, global_params=global_params)
BatchReadFeatureValues.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}:batchReadFeatureValues',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.batchReadFeatureValues',
ordered_params=['featurestore'],
path_params=['featurestore'],
query_params=[],
relative_path='v1beta1/{+featurestore}:batchReadFeatureValues',
request_field='googleCloudAiplatformV1beta1BatchReadFeatureValuesRequest',
request_type_name='AiplatformProjectsLocationsFeaturestoresBatchReadFeatureValuesRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
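# Illustrative usage: a batch read is addressed to the featurestore rather
# than a single EntityType and returns a long-running operation. The inner
# BatchReadFeatureValuesRequest payload is left schematic.
#
#   batch_body = messages.GoogleCloudAiplatformV1beta1BatchReadFeatureValuesRequest()
#   # ... set read instances, entity-type specs, and destination on batch_body ...
#   request = messages.AiplatformProjectsLocationsFeaturestoresBatchReadFeatureValuesRequest(
#       featurestore='projects/my-project/locations/us-central1/featurestores/my-fs',
#       googleCloudAiplatformV1beta1BatchReadFeatureValuesRequest=batch_body)
#   operation = client.projects_locations_featurestores.BatchReadFeatureValues(request)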
def Create(self, request, global_params=None):
r"""Creates a new Featurestore in a given project and location.
Args:
request: (AiplatformProjectsLocationsFeaturestoresCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores',
http_method='POST',
method_id='aiplatform.projects.locations.featurestores.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['featurestoreId'],
relative_path='v1beta1/{+parent}/featurestores',
request_field='googleCloudAiplatformV1beta1Featurestore',
request_type_name='AiplatformProjectsLocationsFeaturestoresCreateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a single Featurestore. The Featurestore must not contain any EntityTypes or `force` must be set to true for the request to succeed.
Args:
request: (AiplatformProjectsLocationsFeaturestoresDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.featurestores.delete',
ordered_params=['name'],
path_params=['name'],
query_params=['force'],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets details of a single Featurestore.
Args:
request: (AiplatformProjectsLocationsFeaturestoresGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Featurestore) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Featurestore',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists Featurestores in a given project and location.
Args:
request: (AiplatformProjectsLocationsFeaturestoresListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListFeaturestoresResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/featurestores',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListFeaturestoresResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates the parameters of a single Featurestore.
Args:
request: (AiplatformProjectsLocationsFeaturestoresPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores/{featurestoresId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.featurestores.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1Featurestore',
request_type_name='AiplatformProjectsLocationsFeaturestoresPatchRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
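# Illustrative usage: partial update via Patch. `updateMask` is a
# comma-separated list of field paths limiting which fields of the body are
# applied; 'labels' below is an assumed example path.
#
#   featurestore = messages.GoogleCloudAiplatformV1beta1Featurestore()
#   # ... set only the fields named in updateMask on featurestore ...
#   request = messages.AiplatformProjectsLocationsFeaturestoresPatchRequest(
#       name='projects/my-project/locations/us-central1/featurestores/my-fs',
#       updateMask='labels',
#       googleCloudAiplatformV1beta1Featurestore=featurestore)
#   operation = client.projects_locations_featurestores.Patch(request)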
def SearchFeatures(self, request, global_params=None):
r"""Searches Features matching a query in a given project.
Args:
request: (AiplatformProjectsLocationsFeaturestoresSearchFeaturesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1SearchFeaturesResponse) The response message.
"""
config = self.GetMethodConfig('SearchFeatures')
return self._RunMethod(
config, request, global_params=global_params)
SearchFeatures.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/featurestores:searchFeatures',
http_method='GET',
method_id='aiplatform.projects.locations.featurestores.searchFeatures',
ordered_params=['location'],
path_params=['location'],
query_params=['pageSize', 'pageToken', 'query'],
relative_path='v1beta1/{+location}/featurestores:searchFeatures',
request_field='',
request_type_name='AiplatformProjectsLocationsFeaturestoresSearchFeaturesRequest',
response_type_name='GoogleCloudAiplatformV1beta1SearchFeaturesResponse',
supports_download=False,
)
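# Illustrative usage: searching Features across a location. The `query`
# syntax is defined by the API; the value here is a placeholder.
#
#   request = messages.AiplatformProjectsLocationsFeaturestoresSearchFeaturesRequest(
#       location='projects/my-project/locations/us-central1',
#       query='feature_id:churn',
#       pageSize=50)
#   response = client.projects_locations_featurestores.SearchFeatures(request)
#   # Iterate matches and page with nextPageToken; see the generated
#   # GoogleCloudAiplatformV1beta1SearchFeaturesResponse for field names.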
class ProjectsLocationsHyperparameterTuningJobsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_hyperparameterTuningJobs_operations resource."""
_NAME = 'projects_locations_hyperparameterTuningJobs_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsHyperparameterTuningJobsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsHyperparameterTuningJobsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/hyperparameterTuningJobs/{hyperparameterTuningJobsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.hyperparameterTuningJobs.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsHyperparameterTuningJobsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsHyperparameterTuningJobsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/hyperparameterTuningJobs/{hyperparameterTuningJobsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.hyperparameterTuningJobs.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsHyperparameterTuningJobsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsHyperparameterTuningJobsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/hyperparameterTuningJobs/{hyperparameterTuningJobsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.hyperparameterTuningJobs.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsHyperparameterTuningJobsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsHyperparameterTuningJobsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/hyperparameterTuningJobs/{hyperparameterTuningJobsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.hyperparameterTuningJobs.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsHyperparameterTuningJobsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsHyperparameterTuningJobsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/hyperparameterTuningJobs/{hyperparameterTuningJobsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.hyperparameterTuningJobs.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsHyperparameterTuningJobsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsHyperparameterTuningJobsService(base_api.BaseApiService):
"""Service class for the projects_locations_hyperparameterTuningJobs resource."""
_NAME = 'projects_locations_hyperparameterTuningJobs'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsHyperparameterTuningJobsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Cancels a HyperparameterTuningJob. Starts asynchronous cancellation on the HyperparameterTuningJob. The server makes a best effort to cancel the job, but success is not guaranteed. Clients can use JobService.GetHyperparameterTuningJob or other methods to check whether the cancellation succeeded or whether the job completed despite cancellation. On successful cancellation, the HyperparameterTuningJob is not deleted; instead it becomes a job with a HyperparameterTuningJob.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`, and HyperparameterTuningJob.state is set to `CANCELLED`.
Args:
request: (AiplatformProjectsLocationsHyperparameterTuningJobsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/hyperparameterTuningJobs/{hyperparameterTuningJobsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.hyperparameterTuningJobs.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='googleCloudAiplatformV1beta1CancelHyperparameterTuningJobRequest',
request_type_name='AiplatformProjectsLocationsHyperparameterTuningJobsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Create(self, request, global_params=None):
r"""Creates a HyperparameterTuningJob.
Args:
request: (AiplatformProjectsLocationsHyperparameterTuningJobsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1HyperparameterTuningJob) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/hyperparameterTuningJobs',
http_method='POST',
method_id='aiplatform.projects.locations.hyperparameterTuningJobs.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/hyperparameterTuningJobs',
request_field='googleCloudAiplatformV1beta1HyperparameterTuningJob',
request_type_name='AiplatformProjectsLocationsHyperparameterTuningJobsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1HyperparameterTuningJob',
supports_download=False,
)
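# Illustrative usage: creating a HyperparameterTuningJob. Unlike most create
# methods here, this returns the job itself rather than an operation. The job
# body is left schematic apart from an assumed display name.
#
#   job = messages.GoogleCloudAiplatformV1beta1HyperparameterTuningJob(
#       displayName='my-tuning-job')
#   # ... set study spec, trial counts, and trial job spec on job ...
#   request = messages.AiplatformProjectsLocationsHyperparameterTuningJobsCreateRequest(
#       parent='projects/my-project/locations/us-central1',
#       googleCloudAiplatformV1beta1HyperparameterTuningJob=job)
#   created_job = client.projects_locations_hyperparameterTuningJobs.Create(request)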
def Delete(self, request, global_params=None):
r"""Deletes a HyperparameterTuningJob.
Args:
request: (AiplatformProjectsLocationsHyperparameterTuningJobsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/hyperparameterTuningJobs/{hyperparameterTuningJobsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.hyperparameterTuningJobs.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsHyperparameterTuningJobsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a HyperparameterTuningJob.
Args:
request: (AiplatformProjectsLocationsHyperparameterTuningJobsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1HyperparameterTuningJob) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/hyperparameterTuningJobs/{hyperparameterTuningJobsId}',
http_method='GET',
method_id='aiplatform.projects.locations.hyperparameterTuningJobs.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsHyperparameterTuningJobsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1HyperparameterTuningJob',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists HyperparameterTuningJobs in a Location.
Args:
request: (AiplatformProjectsLocationsHyperparameterTuningJobsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListHyperparameterTuningJobsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/hyperparameterTuningJobs',
http_method='GET',
method_id='aiplatform.projects.locations.hyperparameterTuningJobs.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/hyperparameterTuningJobs',
request_field='',
request_type_name='AiplatformProjectsLocationsHyperparameterTuningJobsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListHyperparameterTuningJobsResponse',
supports_download=False,
)
class ProjectsLocationsIndexEndpointsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_indexEndpoints_operations resource."""
_NAME = 'projects_locations_indexEndpoints_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsIndexEndpointsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.indexEndpoints.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexEndpointsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.indexEndpoints.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexEndpointsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.indexEndpoints.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexEndpointsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.indexEndpoints.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexEndpointsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.indexEndpoints.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexEndpointsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsIndexEndpointsService(base_api.BaseApiService):
"""Service class for the projects_locations_indexEndpoints resource."""
_NAME = 'projects_locations_indexEndpoints'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsIndexEndpointsService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates an IndexEndpoint.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints',
http_method='POST',
method_id='aiplatform.projects.locations.indexEndpoints.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/indexEndpoints',
request_field='googleCloudAiplatformV1beta1IndexEndpoint',
request_type_name='AiplatformProjectsLocationsIndexEndpointsCreateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes an IndexEndpoint.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.indexEndpoints.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexEndpointsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def DeployIndex(self, request, global_params=None):
r"""Deploys an Index into this IndexEndpoint, creating a DeployedIndex within it. Only non-empty Indexes can be deployed.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsDeployIndexRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('DeployIndex')
return self._RunMethod(
config, request, global_params=global_params)
DeployIndex.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}:deployIndex',
http_method='POST',
method_id='aiplatform.projects.locations.indexEndpoints.deployIndex',
ordered_params=['indexEndpoint'],
path_params=['indexEndpoint'],
query_params=[],
relative_path='v1beta1/{+indexEndpoint}:deployIndex',
request_field='googleCloudAiplatformV1beta1DeployIndexRequest',
request_type_name='AiplatformProjectsLocationsIndexEndpointsDeployIndexRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
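# Illustrative usage: deploying an Index to this IndexEndpoint. The
# DeployIndexRequest wraps a DeployedIndex; the `id` and `index` fields shown
# are assumptions based on the message definitions, and the Index must be
# non-empty per the docstring.
#
#   deploy_body = messages.GoogleCloudAiplatformV1beta1DeployIndexRequest(
#       deployedIndex=messages.GoogleCloudAiplatformV1beta1DeployedIndex(
#           id='deployed-index-1',
#           index='projects/my-project/locations/us-central1/indexes/my-index'))
#   request = messages.AiplatformProjectsLocationsIndexEndpointsDeployIndexRequest(
#       indexEndpoint='projects/my-project/locations/us-central1/indexEndpoints/my-endpoint',
#       googleCloudAiplatformV1beta1DeployIndexRequest=deploy_body)
#   operation = client.projects_locations_indexEndpoints.DeployIndex(request)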
def Get(self, request, global_params=None):
r"""Gets an IndexEndpoint.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1IndexEndpoint) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}',
http_method='GET',
method_id='aiplatform.projects.locations.indexEndpoints.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexEndpointsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1IndexEndpoint',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists IndexEndpoints in a Location.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListIndexEndpointsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints',
http_method='GET',
method_id='aiplatform.projects.locations.indexEndpoints.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/indexEndpoints',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexEndpointsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListIndexEndpointsResponse',
supports_download=False,
)
def MutateDeployedIndex(self, request, global_params=None):
r"""Update an existing DeployedIndex under an IndexEndpoint.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsMutateDeployedIndexRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('MutateDeployedIndex')
return self._RunMethod(
config, request, global_params=global_params)
MutateDeployedIndex.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}:mutateDeployedIndex',
http_method='POST',
method_id='aiplatform.projects.locations.indexEndpoints.mutateDeployedIndex',
ordered_params=['indexEndpoint'],
path_params=['indexEndpoint'],
query_params=[],
relative_path='v1beta1/{+indexEndpoint}:mutateDeployedIndex',
request_field='googleCloudAiplatformV1beta1DeployedIndex',
request_type_name='AiplatformProjectsLocationsIndexEndpointsMutateDeployedIndexRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates an IndexEndpoint.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1IndexEndpoint) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.indexEndpoints.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1IndexEndpoint',
request_type_name='AiplatformProjectsLocationsIndexEndpointsPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1IndexEndpoint',
supports_download=False,
)
def UndeployIndex(self, request, global_params=None):
r"""Undeploys an Index from an IndexEndpoint, removing a DeployedIndex from it, and freeing all resources it's using.
Args:
request: (AiplatformProjectsLocationsIndexEndpointsUndeployIndexRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('UndeployIndex')
return self._RunMethod(
config, request, global_params=global_params)
UndeployIndex.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexEndpoints/{indexEndpointsId}:undeployIndex',
http_method='POST',
method_id='aiplatform.projects.locations.indexEndpoints.undeployIndex',
ordered_params=['indexEndpoint'],
path_params=['indexEndpoint'],
query_params=[],
relative_path='v1beta1/{+indexEndpoint}:undeployIndex',
request_field='googleCloudAiplatformV1beta1UndeployIndexRequest',
request_type_name='AiplatformProjectsLocationsIndexEndpointsUndeployIndexRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
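# Illustrative sketch (not part of the generated surface): undeploying an index
# from an endpoint with the service above. Assumes `client` is a constructed
# AiplatformV1beta1 instance; the resource names and deployed index id below
# are hypothetical. Request/response types come from the client's
# MESSAGES_MODULE, and the call returns a GoogleLongrunningOperation that can
# be polled through the operations services.
#
#   msgs = client.MESSAGES_MODULE
#   op = client.projects_locations_indexEndpoints.UndeployIndex(
#       msgs.AiplatformProjectsLocationsIndexEndpointsUndeployIndexRequest(
#           indexEndpoint='projects/my-proj/locations/us-central1/indexEndpoints/123',
#           googleCloudAiplatformV1beta1UndeployIndexRequest=(
#               msgs.GoogleCloudAiplatformV1beta1UndeployIndexRequest(
#                   deployedIndexId='my_deployed_index'))))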
class ProjectsLocationsIndexesOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_indexes_operations resource."""
_NAME = 'projects_locations_indexes_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsIndexesOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsIndexesOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexes/{indexesId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.indexes.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexesOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsIndexesOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexes/{indexesId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.indexes.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexesOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsIndexesOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexes/{indexesId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.indexes.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexesOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsIndexesOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexes/{indexesId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.indexes.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexesOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsIndexesOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexes/{indexesId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.indexes.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexesOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
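# Illustrative sketch: waiting on an index operation with the service above.
# `timeout` is the query parameter declared on Wait; the operation name is
# hypothetical. If the wait elapses before completion, Get can be used to keep
# polling the same operation.
#
#   msgs = client.MESSAGES_MODULE
#   op = client.projects_locations_indexes_operations.Wait(
#       msgs.AiplatformProjectsLocationsIndexesOperationsWaitRequest(
#           name='projects/my-proj/locations/us-central1/indexes/456/operations/789',
#           timeout='60s'))
#   if not op.done:
#       op = client.projects_locations_indexes_operations.Get(
#           msgs.AiplatformProjectsLocationsIndexesOperationsGetRequest(name=op.name))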
class ProjectsLocationsIndexesService(base_api.BaseApiService):
"""Service class for the projects_locations_indexes resource."""
_NAME = 'projects_locations_indexes'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsIndexesService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates an Index.
Args:
request: (AiplatformProjectsLocationsIndexesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexes',
http_method='POST',
method_id='aiplatform.projects.locations.indexes.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/indexes',
request_field='googleCloudAiplatformV1beta1Index',
request_type_name='AiplatformProjectsLocationsIndexesCreateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes an Index. An Index can only be deleted when all its DeployedIndexes had been undeployed.
Args:
request: (AiplatformProjectsLocationsIndexesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexes/{indexesId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.indexes.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexesDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets an Index.
Args:
request: (AiplatformProjectsLocationsIndexesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Index) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexes/{indexesId}',
http_method='GET',
method_id='aiplatform.projects.locations.indexes.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexesGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Index',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists Indexes in a Location.
Args:
request: (AiplatformProjectsLocationsIndexesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListIndexesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexes',
http_method='GET',
method_id='aiplatform.projects.locations.indexes.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/indexes',
request_field='',
request_type_name='AiplatformProjectsLocationsIndexesListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListIndexesResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates an Index.
Args:
request: (AiplatformProjectsLocationsIndexesPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/indexes/{indexesId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.indexes.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1Index',
request_type_name='AiplatformProjectsLocationsIndexesPatchRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
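# Illustrative sketch: paging through the Indexes in a Location using the
# `pageSize`/`pageToken` query parameters declared on List above. The parent
# resource name is hypothetical.
#
#   msgs = client.MESSAGES_MODULE
#   token = None
#   while True:
#       resp = client.projects_locations_indexes.List(
#           msgs.AiplatformProjectsLocationsIndexesListRequest(
#               parent='projects/my-proj/locations/us-central1',
#               pageSize=100, pageToken=token))
#       for index in resp.indexes:
#           print(index.name)
#       token = resp.nextPageToken
#       if not token:
#           break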
class ProjectsLocationsMetadataStoresArtifactsService(base_api.BaseApiService):
"""Service class for the projects_locations_metadataStores_artifacts resource."""
_NAME = 'projects_locations_metadataStores_artifacts'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsMetadataStoresArtifactsService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates an Artifact associated with a MetadataStore.
Args:
request: (AiplatformProjectsLocationsMetadataStoresArtifactsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Artifact) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/artifacts',
http_method='POST',
method_id='aiplatform.projects.locations.metadataStores.artifacts.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['artifactId'],
relative_path='v1beta1/{+parent}/artifacts',
request_field='googleCloudAiplatformV1beta1Artifact',
request_type_name='AiplatformProjectsLocationsMetadataStoresArtifactsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1Artifact',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes an Artifact.
Args:
request: (AiplatformProjectsLocationsMetadataStoresArtifactsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/artifacts/{artifactsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.metadataStores.artifacts.delete',
ordered_params=['name'],
path_params=['name'],
query_params=['etag'],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresArtifactsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Retrieves a specific Artifact.
Args:
request: (AiplatformProjectsLocationsMetadataStoresArtifactsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Artifact) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/artifacts/{artifactsId}',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.artifacts.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresArtifactsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Artifact',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists Artifacts in the MetadataStore.
Args:
request: (AiplatformProjectsLocationsMetadataStoresArtifactsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListArtifactsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/artifacts',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.artifacts.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+parent}/artifacts',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresArtifactsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListArtifactsResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates a stored Artifact.
Args:
request: (AiplatformProjectsLocationsMetadataStoresArtifactsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Artifact) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/artifacts/{artifactsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.metadataStores.artifacts.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['allowMissing', 'updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1Artifact',
request_type_name='AiplatformProjectsLocationsMetadataStoresArtifactsPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1Artifact',
supports_download=False,
)
def Purge(self, request, global_params=None):
r"""Purges Artifacts.
Args:
request: (AiplatformProjectsLocationsMetadataStoresArtifactsPurgeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Purge')
return self._RunMethod(
config, request, global_params=global_params)
Purge.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/artifacts:purge',
http_method='POST',
method_id='aiplatform.projects.locations.metadataStores.artifacts.purge',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/artifacts:purge',
request_field='googleCloudAiplatformV1beta1PurgeArtifactsRequest',
request_type_name='AiplatformProjectsLocationsMetadataStoresArtifactsPurgeRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def QueryArtifactLineageSubgraph(self, request, global_params=None):
r"""Retrieves lineage of an Artifact represented through Artifacts and Executions connected by Event edges and returned as a LineageSubgraph.
Args:
request: (AiplatformProjectsLocationsMetadataStoresArtifactsQueryArtifactLineageSubgraphRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1LineageSubgraph) The response message.
"""
config = self.GetMethodConfig('QueryArtifactLineageSubgraph')
return self._RunMethod(
config, request, global_params=global_params)
QueryArtifactLineageSubgraph.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/artifacts/{artifactsId}:queryArtifactLineageSubgraph',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.artifacts.queryArtifactLineageSubgraph',
ordered_params=['artifact'],
path_params=['artifact'],
query_params=['filter', 'maxHops'],
relative_path='v1beta1/{+artifact}:queryArtifactLineageSubgraph',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresArtifactsQueryArtifactLineageSubgraphRequest',
response_type_name='GoogleCloudAiplatformV1beta1LineageSubgraph',
supports_download=False,
)
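# Illustrative sketch: creating an Artifact and querying its lineage with the
# service above. The `artifactId`, display name, and resource names are
# hypothetical; QueryArtifactLineageSubgraph bounds traversal with the
# `maxHops` query parameter.
#
#   msgs = client.MESSAGES_MODULE
#   store = 'projects/my-proj/locations/us-central1/metadataStores/default'
#   artifact = client.projects_locations_metadataStores_artifacts.Create(
#       msgs.AiplatformProjectsLocationsMetadataStoresArtifactsCreateRequest(
#           parent=store, artifactId='my-artifact',
#           googleCloudAiplatformV1beta1Artifact=(
#               msgs.GoogleCloudAiplatformV1beta1Artifact(displayName='training-data'))))
#   subgraph = client.projects_locations_metadataStores_artifacts.QueryArtifactLineageSubgraph(
#       msgs.AiplatformProjectsLocationsMetadataStoresArtifactsQueryArtifactLineageSubgraphRequest(
#           artifact=artifact.name, maxHops=2))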
class ProjectsLocationsMetadataStoresContextsService(base_api.BaseApiService):
"""Service class for the projects_locations_metadataStores_contexts resource."""
_NAME = 'projects_locations_metadataStores_contexts'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsMetadataStoresContextsService, self).__init__(client)
self._upload_configs = {
}
def AddContextArtifactsAndExecutions(self, request, global_params=None):
r"""Adds a set of Artifacts and Executions to a Context. If any of the Artifacts or Executions have already been added to a Context, they are simply skipped.
Args:
request: (AiplatformProjectsLocationsMetadataStoresContextsAddContextArtifactsAndExecutionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1AddContextArtifactsAndExecutionsResponse) The response message.
"""
config = self.GetMethodConfig('AddContextArtifactsAndExecutions')
return self._RunMethod(
config, request, global_params=global_params)
AddContextArtifactsAndExecutions.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/contexts/{contextsId}:addContextArtifactsAndExecutions',
http_method='POST',
method_id='aiplatform.projects.locations.metadataStores.contexts.addContextArtifactsAndExecutions',
ordered_params=['context'],
path_params=['context'],
query_params=[],
relative_path='v1beta1/{+context}:addContextArtifactsAndExecutions',
request_field='googleCloudAiplatformV1beta1AddContextArtifactsAndExecutionsRequest',
request_type_name='AiplatformProjectsLocationsMetadataStoresContextsAddContextArtifactsAndExecutionsRequest',
response_type_name='GoogleCloudAiplatformV1beta1AddContextArtifactsAndExecutionsResponse',
supports_download=False,
)
def AddContextChildren(self, request, global_params=None):
r"""Adds a set of Contexts as children to a parent Context. If any of the child Contexts have already been added to the parent Context, they are simply skipped. If this call would create a cycle or cause any Context to have more than 10 parents, the request will fail with an INVALID_ARGUMENT error.
Args:
request: (AiplatformProjectsLocationsMetadataStoresContextsAddContextChildrenRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1AddContextChildrenResponse) The response message.
"""
config = self.GetMethodConfig('AddContextChildren')
return self._RunMethod(
config, request, global_params=global_params)
AddContextChildren.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/contexts/{contextsId}:addContextChildren',
http_method='POST',
method_id='aiplatform.projects.locations.metadataStores.contexts.addContextChildren',
ordered_params=['context'],
path_params=['context'],
query_params=[],
relative_path='v1beta1/{+context}:addContextChildren',
request_field='googleCloudAiplatformV1beta1AddContextChildrenRequest',
request_type_name='AiplatformProjectsLocationsMetadataStoresContextsAddContextChildrenRequest',
response_type_name='GoogleCloudAiplatformV1beta1AddContextChildrenResponse',
supports_download=False,
)
def Create(self, request, global_params=None):
r"""Creates a Context associated with a MetadataStore.
Args:
request: (AiplatformProjectsLocationsMetadataStoresContextsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Context) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/contexts',
http_method='POST',
method_id='aiplatform.projects.locations.metadataStores.contexts.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['contextId'],
relative_path='v1beta1/{+parent}/contexts',
request_field='googleCloudAiplatformV1beta1Context',
request_type_name='AiplatformProjectsLocationsMetadataStoresContextsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1Context',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a stored Context.
Args:
request: (AiplatformProjectsLocationsMetadataStoresContextsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/contexts/{contextsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.metadataStores.contexts.delete',
ordered_params=['name'],
path_params=['name'],
query_params=['etag', 'force'],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresContextsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Retrieves a specific Context.
Args:
request: (AiplatformProjectsLocationsMetadataStoresContextsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Context) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/contexts/{contextsId}',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.contexts.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresContextsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Context',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists Contexts on the MetadataStore.
Args:
request: (AiplatformProjectsLocationsMetadataStoresContextsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListContextsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/contexts',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.contexts.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+parent}/contexts',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresContextsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListContextsResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates a stored Context.
Args:
request: (AiplatformProjectsLocationsMetadataStoresContextsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Context) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/contexts/{contextsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.metadataStores.contexts.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['allowMissing', 'updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1Context',
request_type_name='AiplatformProjectsLocationsMetadataStoresContextsPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1Context',
supports_download=False,
)
def Purge(self, request, global_params=None):
r"""Purges Contexts.
Args:
request: (AiplatformProjectsLocationsMetadataStoresContextsPurgeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Purge')
return self._RunMethod(
config, request, global_params=global_params)
Purge.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/contexts:purge',
http_method='POST',
method_id='aiplatform.projects.locations.metadataStores.contexts.purge',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/contexts:purge',
request_field='googleCloudAiplatformV1beta1PurgeContextsRequest',
request_type_name='AiplatformProjectsLocationsMetadataStoresContextsPurgeRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def QueryContextLineageSubgraph(self, request, global_params=None):
r"""Retrieves Artifacts and Executions within the specified Context, connected by Event edges and returned as a LineageSubgraph.
Args:
request: (AiplatformProjectsLocationsMetadataStoresContextsQueryContextLineageSubgraphRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1LineageSubgraph) The response message.
"""
config = self.GetMethodConfig('QueryContextLineageSubgraph')
return self._RunMethod(
config, request, global_params=global_params)
QueryContextLineageSubgraph.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/contexts/{contextsId}:queryContextLineageSubgraph',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.contexts.queryContextLineageSubgraph',
ordered_params=['context'],
path_params=['context'],
query_params=[],
relative_path='v1beta1/{+context}:queryContextLineageSubgraph',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresContextsQueryContextLineageSubgraphRequest',
response_type_name='GoogleCloudAiplatformV1beta1LineageSubgraph',
supports_download=False,
)
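# Illustrative sketch: linking Artifacts and Executions to a Context via
# AddContextArtifactsAndExecutions above. Already-linked resources are skipped
# by the service, so repeating the call is harmless; all names here are
# hypothetical.
#
#   msgs = client.MESSAGES_MODULE
#   ctx = 'projects/my-proj/locations/us-central1/metadataStores/default/contexts/run-1'
#   client.projects_locations_metadataStores_contexts.AddContextArtifactsAndExecutions(
#       msgs.AiplatformProjectsLocationsMetadataStoresContextsAddContextArtifactsAndExecutionsRequest(
#           context=ctx,
#           googleCloudAiplatformV1beta1AddContextArtifactsAndExecutionsRequest=(
#               msgs.GoogleCloudAiplatformV1beta1AddContextArtifactsAndExecutionsRequest(
#                   artifacts=[ctx.rsplit('/contexts/', 1)[0] + '/artifacts/a1'],
#                   executions=[ctx.rsplit('/contexts/', 1)[0] + '/executions/e1']))))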
class ProjectsLocationsMetadataStoresExecutionsService(base_api.BaseApiService):
"""Service class for the projects_locations_metadataStores_executions resource."""
_NAME = 'projects_locations_metadataStores_executions'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsMetadataStoresExecutionsService, self).__init__(client)
self._upload_configs = {
}
def AddExecutionEvents(self, request, global_params=None):
r"""Adds Events to the specified Execution. An Event indicates whether an Artifact was used as an input or output for an Execution. If an Event already exists between the Execution and the Artifact, the Event is skipped.
Args:
request: (AiplatformProjectsLocationsMetadataStoresExecutionsAddExecutionEventsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1AddExecutionEventsResponse) The response message.
"""
config = self.GetMethodConfig('AddExecutionEvents')
return self._RunMethod(
config, request, global_params=global_params)
AddExecutionEvents.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/executions/{executionsId}:addExecutionEvents',
http_method='POST',
method_id='aiplatform.projects.locations.metadataStores.executions.addExecutionEvents',
ordered_params=['execution'],
path_params=['execution'],
query_params=[],
relative_path='v1beta1/{+execution}:addExecutionEvents',
request_field='googleCloudAiplatformV1beta1AddExecutionEventsRequest',
request_type_name='AiplatformProjectsLocationsMetadataStoresExecutionsAddExecutionEventsRequest',
response_type_name='GoogleCloudAiplatformV1beta1AddExecutionEventsResponse',
supports_download=False,
)
def Create(self, request, global_params=None):
r"""Creates an Execution associated with a MetadataStore.
Args:
request: (AiplatformProjectsLocationsMetadataStoresExecutionsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Execution) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/executions',
http_method='POST',
method_id='aiplatform.projects.locations.metadataStores.executions.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['executionId'],
relative_path='v1beta1/{+parent}/executions',
request_field='googleCloudAiplatformV1beta1Execution',
request_type_name='AiplatformProjectsLocationsMetadataStoresExecutionsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1Execution',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes an Execution.
Args:
request: (AiplatformProjectsLocationsMetadataStoresExecutionsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/executions/{executionsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.metadataStores.executions.delete',
ordered_params=['name'],
path_params=['name'],
query_params=['etag'],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresExecutionsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Retrieves a specific Execution.
Args:
request: (AiplatformProjectsLocationsMetadataStoresExecutionsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Execution) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/executions/{executionsId}',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.executions.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresExecutionsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Execution',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists Executions in the MetadataStore.
Args:
request: (AiplatformProjectsLocationsMetadataStoresExecutionsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListExecutionsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/executions',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.executions.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+parent}/executions',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresExecutionsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListExecutionsResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates a stored Execution.
Args:
request: (AiplatformProjectsLocationsMetadataStoresExecutionsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Execution) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/executions/{executionsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.metadataStores.executions.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['allowMissing', 'updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1Execution',
request_type_name='AiplatformProjectsLocationsMetadataStoresExecutionsPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1Execution',
supports_download=False,
)
def Purge(self, request, global_params=None):
r"""Purges Executions.
Args:
request: (AiplatformProjectsLocationsMetadataStoresExecutionsPurgeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Purge')
return self._RunMethod(
config, request, global_params=global_params)
Purge.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/executions:purge',
http_method='POST',
method_id='aiplatform.projects.locations.metadataStores.executions.purge',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/executions:purge',
request_field='googleCloudAiplatformV1beta1PurgeExecutionsRequest',
request_type_name='AiplatformProjectsLocationsMetadataStoresExecutionsPurgeRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def QueryExecutionInputsAndOutputs(self, request, global_params=None):
r"""Obtains the set of input and output Artifacts for this Execution, in the form of LineageSubgraph that also contains the Execution and connecting Events.
Args:
request: (AiplatformProjectsLocationsMetadataStoresExecutionsQueryExecutionInputsAndOutputsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1LineageSubgraph) The response message.
"""
config = self.GetMethodConfig('QueryExecutionInputsAndOutputs')
return self._RunMethod(
config, request, global_params=global_params)
QueryExecutionInputsAndOutputs.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/executions/{executionsId}:queryExecutionInputsAndOutputs',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.executions.queryExecutionInputsAndOutputs',
ordered_params=['execution'],
path_params=['execution'],
query_params=[],
relative_path='v1beta1/{+execution}:queryExecutionInputsAndOutputs',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresExecutionsQueryExecutionInputsAndOutputsRequest',
response_type_name='GoogleCloudAiplatformV1beta1LineageSubgraph',
supports_download=False,
)
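# Illustrative sketch: recording an input Artifact for an Execution with
# AddExecutionEvents, then reading the I/O graph back with
# QueryExecutionInputsAndOutputs. The enum access via TypeValueValuesEnum
# follows the usual apitools naming for a field called `type`; resource names
# are hypothetical.
#
#   msgs = client.MESSAGES_MODULE
#   execution = ('projects/my-proj/locations/us-central1/metadataStores/default'
#                '/executions/e1')
#   event = msgs.GoogleCloudAiplatformV1beta1Event(
#       artifact='projects/my-proj/locations/us-central1/metadataStores/default/artifacts/a1',
#       type=msgs.GoogleCloudAiplatformV1beta1Event.TypeValueValuesEnum.INPUT)
#   client.projects_locations_metadataStores_executions.AddExecutionEvents(
#       msgs.AiplatformProjectsLocationsMetadataStoresExecutionsAddExecutionEventsRequest(
#           execution=execution,
#           googleCloudAiplatformV1beta1AddExecutionEventsRequest=(
#               msgs.GoogleCloudAiplatformV1beta1AddExecutionEventsRequest(events=[event]))))
#   graph = client.projects_locations_metadataStores_executions.QueryExecutionInputsAndOutputs(
#       msgs.AiplatformProjectsLocationsMetadataStoresExecutionsQueryExecutionInputsAndOutputsRequest(
#           execution=execution))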
class ProjectsLocationsMetadataStoresMetadataSchemasService(base_api.BaseApiService):
"""Service class for the projects_locations_metadataStores_metadataSchemas resource."""
_NAME = 'projects_locations_metadataStores_metadataSchemas'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsMetadataStoresMetadataSchemasService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a MetadataSchema.
Args:
request: (AiplatformProjectsLocationsMetadataStoresMetadataSchemasCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1MetadataSchema) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/metadataSchemas',
http_method='POST',
method_id='aiplatform.projects.locations.metadataStores.metadataSchemas.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['metadataSchemaId'],
relative_path='v1beta1/{+parent}/metadataSchemas',
request_field='googleCloudAiplatformV1beta1MetadataSchema',
request_type_name='AiplatformProjectsLocationsMetadataStoresMetadataSchemasCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1MetadataSchema',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Retrieves a specific MetadataSchema.
Args:
request: (AiplatformProjectsLocationsMetadataStoresMetadataSchemasGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1MetadataSchema) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/metadataSchemas/{metadataSchemasId}',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.metadataSchemas.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresMetadataSchemasGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1MetadataSchema',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists MetadataSchemas.
Args:
request: (AiplatformProjectsLocationsMetadataStoresMetadataSchemasListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListMetadataSchemasResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}/metadataSchemas',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.metadataSchemas.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+parent}/metadataSchemas',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresMetadataSchemasListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListMetadataSchemasResponse',
supports_download=False,
)
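# Illustrative sketch: registering a MetadataSchema under a store with the
# Create method above. The `metadataSchemaId` and the OpenAPI-style schema
# body are hypothetical placeholders.
#
#   msgs = client.MESSAGES_MODULE
#   schema = client.projects_locations_metadataStores_metadataSchemas.Create(
#       msgs.AiplatformProjectsLocationsMetadataStoresMetadataSchemasCreateRequest(
#           parent='projects/my-proj/locations/us-central1/metadataStores/default',
#           metadataSchemaId='my-schema',
#           googleCloudAiplatformV1beta1MetadataSchema=(
#               msgs.GoogleCloudAiplatformV1beta1MetadataSchema(
#                   schema='title: my_schema\ntype: object'))))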
class ProjectsLocationsMetadataStoresService(base_api.BaseApiService):
"""Service class for the projects_locations_metadataStores resource."""
_NAME = 'projects_locations_metadataStores'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsMetadataStoresService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Initializes a MetadataStore, including allocation of resources.
Args:
request: (AiplatformProjectsLocationsMetadataStoresCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores',
http_method='POST',
method_id='aiplatform.projects.locations.metadataStores.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['metadataStoreId'],
relative_path='v1beta1/{+parent}/metadataStores',
request_field='googleCloudAiplatformV1beta1MetadataStore',
request_type_name='AiplatformProjectsLocationsMetadataStoresCreateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a single MetadataStore and all its child resources (Artifacts, Executions, and Contexts).
Args:
request: (AiplatformProjectsLocationsMetadataStoresDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.metadataStores.delete',
ordered_params=['name'],
path_params=['name'],
query_params=['force'],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Retrieves a specific MetadataStore.
Args:
request: (AiplatformProjectsLocationsMetadataStoresGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1MetadataStore) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores/{metadataStoresId}',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1MetadataStore',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists MetadataStores for a Location.
Args:
request: (AiplatformProjectsLocationsMetadataStoresListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListMetadataStoresResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/metadataStores',
http_method='GET',
method_id='aiplatform.projects.locations.metadataStores.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['pageSize', 'pageToken'],
relative_path='v1beta1/{+parent}/metadataStores',
request_field='',
request_type_name='AiplatformProjectsLocationsMetadataStoresListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListMetadataStoresResponse',
supports_download=False,
)
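# Illustrative sketch: creating a MetadataStore. Because Create allocates
# resources, it returns a GoogleLongrunningOperation rather than the store
# itself; the parent and `metadataStoreId` below are hypothetical.
#
#   msgs = client.MESSAGES_MODULE
#   op = client.projects_locations_metadataStores.Create(
#       msgs.AiplatformProjectsLocationsMetadataStoresCreateRequest(
#           parent='projects/my-proj/locations/us-central1',
#           metadataStoreId='default',
#           googleCloudAiplatformV1beta1MetadataStore=(
#               msgs.GoogleCloudAiplatformV1beta1MetadataStore())))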
class ProjectsLocationsMigratableResourcesOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_migratableResources_operations resource."""
_NAME = 'projects_locations_migratableResources_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsMigratableResourcesOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsMigratableResourcesOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/migratableResources/{migratableResourcesId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.migratableResources.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsMigratableResourcesOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsMigratableResourcesOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/migratableResources/{migratableResourcesId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.migratableResources.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsMigratableResourcesOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsMigratableResourcesOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/migratableResources/{migratableResourcesId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.migratableResources.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsMigratableResourcesOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsMigratableResourcesOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/migratableResources/{migratableResourcesId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.migratableResources.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsMigratableResourcesOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsMigratableResourcesOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/migratableResources/{migratableResourcesId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.migratableResources.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsMigratableResourcesOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
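# Illustrative sketch: listing operations under a migratableResources parent,
# using the `filter`/`pageSize`/`pageToken` query parameters declared on List
# above. The resource name and filter expression are hypothetical.
#
#   msgs = client.MESSAGES_MODULE
#   resp = client.projects_locations_migratableResources_operations.List(
#       msgs.AiplatformProjectsLocationsMigratableResourcesOperationsListRequest(
#           name='projects/my-proj/locations/us-central1/migratableResources/m1',
#           pageSize=50))
#   for op in resp.operations:
#       print(op.name, op.done)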
class ProjectsLocationsMigratableResourcesService(base_api.BaseApiService):
"""Service class for the projects_locations_migratableResources resource."""
_NAME = 'projects_locations_migratableResources'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsMigratableResourcesService, self).__init__(client)
self._upload_configs = {
}
def BatchMigrate(self, request, global_params=None):
r"""Batch migrates resources from ml.googleapis.com, automl.googleapis.com, and datalabeling.googleapis.com to Vertex AI.
Args:
request: (AiplatformProjectsLocationsMigratableResourcesBatchMigrateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('BatchMigrate')
return self._RunMethod(
config, request, global_params=global_params)
BatchMigrate.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/migratableResources:batchMigrate',
http_method='POST',
method_id='aiplatform.projects.locations.migratableResources.batchMigrate',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/migratableResources:batchMigrate',
request_field='googleCloudAiplatformV1beta1BatchMigrateResourcesRequest',
request_type_name='AiplatformProjectsLocationsMigratableResourcesBatchMigrateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
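# Illustrative usage (hedged sketch): BatchMigrate carries its payload in the
# `googleCloudAiplatformV1beta1BatchMigrateResourcesRequest` request field (see
# the config above) and returns a long-running operation to poll. `client`,
# `messages`, the parent string, and the `migrateResourceRequests` field name
# are illustrative assumptions.
#
#   service = client.projects_locations_migratableResources
#   inner = messages.GoogleCloudAiplatformV1beta1BatchMigrateResourcesRequest()
#   # ... populate inner.migrateResourceRequests before sending ...
#   op = service.BatchMigrate(
#       messages.AiplatformProjectsLocationsMigratableResourcesBatchMigrateRequest(
#           parent='projects/my-project/locations/us-central1',
#           googleCloudAiplatformV1beta1BatchMigrateResourcesRequest=inner))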
def Search(self, request, global_params=None):
r"""Searches all of the resources in automl.googleapis.com, datalabeling.googleapis.com and ml.googleapis.com that can be migrated to Vertex AI's given location.
Args:
request: (AiplatformProjectsLocationsMigratableResourcesSearchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1SearchMigratableResourcesResponse) The response message.
"""
config = self.GetMethodConfig('Search')
return self._RunMethod(
config, request, global_params=global_params)
Search.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/migratableResources:search',
http_method='POST',
method_id='aiplatform.projects.locations.migratableResources.search',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/migratableResources:search',
request_field='googleCloudAiplatformV1beta1SearchMigratableResourcesRequest',
request_type_name='AiplatformProjectsLocationsMigratableResourcesSearchRequest',
response_type_name='GoogleCloudAiplatformV1beta1SearchMigratableResourcesResponse',
supports_download=False,
)
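# Illustrative usage (hedged sketch): Search is a POST whose body travels in
# the `googleCloudAiplatformV1beta1SearchMigratableResourcesRequest` field, so
# paging fields live in the body rather than as query parameters (the
# `pageSize` body field is an assumption based on the standard List pattern).
#
#   service = client.projects_locations_migratableResources
#   response = service.Search(
#       messages.AiplatformProjectsLocationsMigratableResourcesSearchRequest(
#           parent='projects/my-project/locations/us-central1',
#           googleCloudAiplatformV1beta1SearchMigratableResourcesRequest=
#               messages.GoogleCloudAiplatformV1beta1SearchMigratableResourcesRequest(
#                   pageSize=50)))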
class ProjectsLocationsModelDeploymentMonitoringJobsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_modelDeploymentMonitoringJobs_operations resource."""
_NAME = 'projects_locations_modelDeploymentMonitoringJobs_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsModelDeploymentMonitoringJobsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
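# Illustrative usage (hedged sketch): cancellation is asynchronous and
# best-effort, so a follow-up Get (defined below) shows whether the operation
# actually transitioned to CANCELLED or completed anyway. `client`, `messages`,
# and `operation_name` are illustrative assumptions.
#
#   service = client.projects_locations_modelDeploymentMonitoringJobs_operations
#   service.Cancel(
#       messages.AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsCancelRequest(
#           name=operation_name))
#   op = service.Get(
#       messages.AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsGetRequest(
#           name=operation_name))
#   if op.done and op.error:
#     print('cancelled or failed:', op.error.code)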
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsModelDeploymentMonitoringJobsService(base_api.BaseApiService):
"""Service class for the projects_locations_modelDeploymentMonitoringJobs resource."""
_NAME = 'projects_locations_modelDeploymentMonitoringJobs'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsModelDeploymentMonitoringJobsService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a ModelDeploymentMonitoringJob. It will run periodically on a configured interval.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ModelDeploymentMonitoringJob) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs',
http_method='POST',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/modelDeploymentMonitoringJobs',
request_field='googleCloudAiplatformV1beta1ModelDeploymentMonitoringJob',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1ModelDeploymentMonitoringJob',
supports_download=False,
)
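# Illustrative usage (hedged sketch): Create sends a
# GoogleCloudAiplatformV1beta1ModelDeploymentMonitoringJob as the request body
# and returns the created job directly rather than a long-running operation
# (see the response type above). The `displayName` field and the other names
# below are assumptions for illustration.
#
#   service = client.projects_locations_modelDeploymentMonitoringJobs
#   job = messages.GoogleCloudAiplatformV1beta1ModelDeploymentMonitoringJob(
#       displayName='my-monitoring-job')
#   created = service.Create(
#       messages.AiplatformProjectsLocationsModelDeploymentMonitoringJobsCreateRequest(
#           parent='projects/my-project/locations/us-central1',
#           googleCloudAiplatformV1beta1ModelDeploymentMonitoringJob=job))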
def Delete(self, request, global_params=None):
r"""Deletes a ModelDeploymentMonitoringJob.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a ModelDeploymentMonitoringJob.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ModelDeploymentMonitoringJob) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}',
http_method='GET',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1ModelDeploymentMonitoringJob',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists ModelDeploymentMonitoringJobs in a Location.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListModelDeploymentMonitoringJobsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs',
http_method='GET',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/modelDeploymentMonitoringJobs',
request_field='',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListModelDeploymentMonitoringJobsResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates a ModelDeploymentMonitoringJob.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1ModelDeploymentMonitoringJob',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsPatchRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
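# Illustrative usage (hedged sketch): Patch applies a partial update; the
# `updateMask` query parameter names which job fields to overwrite, so only
# the fields listed in the mask need to be set on the body message. The field
# and variable names below are illustrative assumptions.
#
#   service = client.projects_locations_modelDeploymentMonitoringJobs
#   body = messages.GoogleCloudAiplatformV1beta1ModelDeploymentMonitoringJob(
#       displayName='renamed-job')
#   op = service.Patch(
#       messages.AiplatformProjectsLocationsModelDeploymentMonitoringJobsPatchRequest(
#           name=job_name,
#           updateMask='displayName',
#           googleCloudAiplatformV1beta1ModelDeploymentMonitoringJob=body))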
def Pause(self, request, global_params=None):
r"""Pauses a ModelDeploymentMonitoringJob. If the job is running, the server makes a best effort to cancel the job. Will mark ModelDeploymentMonitoringJob.state to 'PAUSED'.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsPauseRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Pause')
return self._RunMethod(
config, request, global_params=global_params)
Pause.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}:pause',
http_method='POST',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.pause',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:pause',
request_field='googleCloudAiplatformV1beta1PauseModelDeploymentMonitoringJobRequest',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsPauseRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Resume(self, request, global_params=None):
r"""Resumes a paused ModelDeploymentMonitoringJob. It will start to run from next scheduled time. A deleted ModelDeploymentMonitoringJob can't be resumed.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsResumeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Resume')
return self._RunMethod(
config, request, global_params=global_params)
Resume.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}:resume',
http_method='POST',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.resume',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:resume',
request_field='googleCloudAiplatformV1beta1ResumeModelDeploymentMonitoringJobRequest',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsResumeRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
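# Illustrative usage (hedged sketch): Pause and Resume are paired state
# transitions on the same job resource; both return GoogleProtobufEmpty, so
# the job must be re-fetched with Get to observe the new state. `client`,
# `messages`, and `job_name` are illustrative assumptions.
#
#   service = client.projects_locations_modelDeploymentMonitoringJobs
#   service.Pause(
#       messages.AiplatformProjectsLocationsModelDeploymentMonitoringJobsPauseRequest(
#           name=job_name))
#   service.Resume(
#       messages.AiplatformProjectsLocationsModelDeploymentMonitoringJobsResumeRequest(
#           name=job_name))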
def SearchModelDeploymentMonitoringStatsAnomalies(self, request, global_params=None):
r"""Searches Model Monitoring Statistics generated within a given time window.
Args:
request: (AiplatformProjectsLocationsModelDeploymentMonitoringJobsSearchModelDeploymentMonitoringStatsAnomaliesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1SearchModelDeploymentMonitoringStatsAnomaliesResponse) The response message.
"""
config = self.GetMethodConfig('SearchModelDeploymentMonitoringStatsAnomalies')
return self._RunMethod(
config, request, global_params=global_params)
SearchModelDeploymentMonitoringStatsAnomalies.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/modelDeploymentMonitoringJobs/{modelDeploymentMonitoringJobsId}:searchModelDeploymentMonitoringStatsAnomalies',
http_method='POST',
method_id='aiplatform.projects.locations.modelDeploymentMonitoringJobs.searchModelDeploymentMonitoringStatsAnomalies',
ordered_params=['modelDeploymentMonitoringJob'],
path_params=['modelDeploymentMonitoringJob'],
query_params=[],
relative_path='v1beta1/{+modelDeploymentMonitoringJob}:searchModelDeploymentMonitoringStatsAnomalies',
request_field='googleCloudAiplatformV1beta1SearchModelDeploymentMonitoringStatsAnomaliesRequest',
request_type_name='AiplatformProjectsLocationsModelDeploymentMonitoringJobsSearchModelDeploymentMonitoringStatsAnomaliesRequest',
response_type_name='GoogleCloudAiplatformV1beta1SearchModelDeploymentMonitoringStatsAnomaliesResponse',
supports_download=False,
)
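# Illustrative usage (hedged sketch): note that this method's resource
# parameter is `modelDeploymentMonitoringJob` rather than `name`, matching the
# `{+modelDeploymentMonitoringJob}` path template in the config above. All
# variable names below are illustrative assumptions.
#
#   service = client.projects_locations_modelDeploymentMonitoringJobs
#   response = service.SearchModelDeploymentMonitoringStatsAnomalies(
#       messages.AiplatformProjectsLocationsModelDeploymentMonitoringJobsSearchModelDeploymentMonitoringStatsAnomaliesRequest(
#           modelDeploymentMonitoringJob=job_name,
#           googleCloudAiplatformV1beta1SearchModelDeploymentMonitoringStatsAnomaliesRequest=
#               messages.GoogleCloudAiplatformV1beta1SearchModelDeploymentMonitoringStatsAnomaliesRequest()))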
class ProjectsLocationsModelsEvaluationsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_models_evaluations_operations resource."""
_NAME = 'projects_locations_models_evaluations_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsModelsEvaluationsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsModelsEvaluationsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/evaluations/{evaluationsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.models.evaluations.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsEvaluationsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsModelsEvaluationsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/evaluations/{evaluationsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.models.evaluations.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsEvaluationsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsModelsEvaluationsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/evaluations/{evaluationsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.models.evaluations.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsEvaluationsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsModelsEvaluationsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/evaluations/{evaluationsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.models.evaluations.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsEvaluationsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsModelsEvaluationsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/evaluations/{evaluationsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.models.evaluations.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsEvaluationsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsModelsEvaluationsSlicesService(base_api.BaseApiService):
"""Service class for the projects_locations_models_evaluations_slices resource."""
_NAME = 'projects_locations_models_evaluations_slices'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsModelsEvaluationsSlicesService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
r"""Gets a ModelEvaluationSlice.
Args:
request: (AiplatformProjectsLocationsModelsEvaluationsSlicesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ModelEvaluationSlice) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/evaluations/{evaluationsId}/slices/{slicesId}',
http_method='GET',
method_id='aiplatform.projects.locations.models.evaluations.slices.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsEvaluationsSlicesGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1ModelEvaluationSlice',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists ModelEvaluationSlices in a ModelEvaluation.
Args:
request: (AiplatformProjectsLocationsModelsEvaluationsSlicesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListModelEvaluationSlicesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/evaluations/{evaluationsId}/slices',
http_method='GET',
method_id='aiplatform.projects.locations.models.evaluations.slices.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/slices',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsEvaluationsSlicesListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListModelEvaluationSlicesResponse',
supports_download=False,
)
class ProjectsLocationsModelsEvaluationsService(base_api.BaseApiService):
"""Service class for the projects_locations_models_evaluations resource."""
_NAME = 'projects_locations_models_evaluations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsModelsEvaluationsService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
r"""Gets a ModelEvaluation.
Args:
request: (AiplatformProjectsLocationsModelsEvaluationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ModelEvaluation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/evaluations/{evaluationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.models.evaluations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsEvaluationsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1ModelEvaluation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists ModelEvaluations in a Model.
Args:
request: (AiplatformProjectsLocationsModelsEvaluationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListModelEvaluationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/evaluations',
http_method='GET',
method_id='aiplatform.projects.locations.models.evaluations.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/evaluations',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsEvaluationsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListModelEvaluationsResponse',
supports_download=False,
)
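# Illustrative usage (hedged sketch): evaluations hang off a Model, and slices
# hang off an evaluation, so a caller typically lists evaluations first and
# then drills into the slices of one evaluation. The `modelEvaluations`
# response field and the `client`/`messages`/`model_name` names are
# assumptions for illustration.
#
#   evals = client.projects_locations_models_evaluations.List(
#       messages.AiplatformProjectsLocationsModelsEvaluationsListRequest(
#           parent=model_name))
#   for evaluation in evals.modelEvaluations:
#     slices = client.projects_locations_models_evaluations_slices.List(
#         messages.AiplatformProjectsLocationsModelsEvaluationsSlicesListRequest(
#             parent=evaluation.name))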
class ProjectsLocationsModelsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_models_operations resource."""
_NAME = 'projects_locations_models_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsModelsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsModelsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.models.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsModelsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.models.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsModelsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.models.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsModelsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.models.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsModelsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.models.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsModelsService(base_api.BaseApiService):
"""Service class for the projects_locations_models resource."""
_NAME = 'projects_locations_models'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsModelsService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
r"""Deletes a Model. A model cannot be deleted if any Endpoint resource has a DeployedModel based on the model in its deployed_models field.
Args:
request: (AiplatformProjectsLocationsModelsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.models.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def DeleteVersion(self, request, global_params=None):
r"""Deletes a Model version. Model version can only be deleted if there are no DeployedModels created from it. Deleting the only version in the Model is not allowed. Use DeleteModel for deleting the Model instead.
Args:
request: (AiplatformProjectsLocationsModelsDeleteVersionRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('DeleteVersion')
return self._RunMethod(
config, request, global_params=global_params)
DeleteVersion.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}:deleteVersion',
http_method='DELETE',
method_id='aiplatform.projects.locations.models.deleteVersion',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:deleteVersion',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsDeleteVersionRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Export(self, request, global_params=None):
r"""Exports a trained, exportable Model to a location specified by the user. A Model is considered to be exportable if it has at least one supported export format.
Args:
request: (AiplatformProjectsLocationsModelsExportRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Export')
return self._RunMethod(
config, request, global_params=global_params)
Export.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}:export',
http_method='POST',
method_id='aiplatform.projects.locations.models.export',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:export',
request_field='googleCloudAiplatformV1beta1ExportModelRequest',
request_type_name='AiplatformProjectsLocationsModelsExportRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
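# Illustrative usage (hedged sketch): Export takes an ExportModelRequest body
# and returns a long-running operation; the export destination lives inside
# that body (its exact sub-fields are not shown in this file and would need to
# be populated from the messages module). All names below are illustrative.
#
#   op = client.projects_locations_models.Export(
#       messages.AiplatformProjectsLocationsModelsExportRequest(
#           name=model_name,
#           googleCloudAiplatformV1beta1ExportModelRequest=
#               messages.GoogleCloudAiplatformV1beta1ExportModelRequest()))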
def Get(self, request, global_params=None):
r"""Gets a Model.
Args:
request: (AiplatformProjectsLocationsModelsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Model) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}',
http_method='GET',
method_id='aiplatform.projects.locations.models.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Model',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists Models in a Location.
Args:
request: (AiplatformProjectsLocationsModelsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListModelsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models',
http_method='GET',
method_id='aiplatform.projects.locations.models.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/models',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListModelsResponse',
supports_download=False,
)
def ListVersions(self, request, global_params=None):
r"""Lists versions of the specified model.
Args:
request: (AiplatformProjectsLocationsModelsListVersionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListModelVersionsResponse) The response message.
"""
config = self.GetMethodConfig('ListVersions')
return self._RunMethod(
config, request, global_params=global_params)
ListVersions.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}:listVersions',
http_method='GET',
method_id='aiplatform.projects.locations.models.listVersions',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+name}:listVersions',
request_field='',
request_type_name='AiplatformProjectsLocationsModelsListVersionsRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListModelVersionsResponse',
supports_download=False,
)
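# Illustrative usage (hedged sketch): unlike List, ListVersions addresses a
# single model via `name` and the custom `:listVersions` verb, but it pages the
# same way through `pageToken`/`nextPageToken`. The `models` and `versionId`
# response fields are assumptions from the public API; `client`, `messages`,
# and `model_name` are illustrative.
#
#   service = client.projects_locations_models
#   token = None
#   while True:
#     page = service.ListVersions(
#         messages.AiplatformProjectsLocationsModelsListVersionsRequest(
#             name=model_name, pageToken=token))
#     for version in page.models:
#       print(version.versionId)
#     token = page.nextPageToken
#     if not token:
#       break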
def Patch(self, request, global_params=None):
r"""Updates a Model.
Args:
request: (AiplatformProjectsLocationsModelsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Model) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.models.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1Model',
request_type_name='AiplatformProjectsLocationsModelsPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1Model',
supports_download=False,
)
def SetVersionAlias(self, request, global_params=None):
r"""Sets an alias for a Model version.
Args:
request: (AiplatformProjectsLocationsModelsSetVersionAliasRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Model) The response message.
"""
config = self.GetMethodConfig('SetVersionAlias')
return self._RunMethod(
config, request, global_params=global_params)
SetVersionAlias.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}:setVersionAlias',
http_method='POST',
method_id='aiplatform.projects.locations.models.setVersionAlias',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:setVersionAlias',
request_field='googleCloudAiplatformV1beta1SetVersionAliasRequest',
request_type_name='AiplatformProjectsLocationsModelsSetVersionAliasRequest',
response_type_name='GoogleCloudAiplatformV1beta1Model',
supports_download=False,
)
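# Illustrative usage (hedged sketch): the version to alias is addressed through
# the resource name, and the aliases travel in the request body; the
# `versionAliases` field name and the version-suffixed resource name are
# assumptions from the public API.
#
#   updated = client.projects_locations_models.SetVersionAlias(
#       messages.AiplatformProjectsLocationsModelsSetVersionAliasRequest(
#           name=model_version_name,
#           googleCloudAiplatformV1beta1SetVersionAliasRequest=
#               messages.GoogleCloudAiplatformV1beta1SetVersionAliasRequest(
#                   versionAliases=['default'])))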
def Upload(self, request, global_params=None):
r"""Uploads a Model artifact into Vertex AI.
Args:
request: (AiplatformProjectsLocationsModelsUploadRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Upload')
return self._RunMethod(
config, request, global_params=global_params)
Upload.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/models:upload',
http_method='POST',
method_id='aiplatform.projects.locations.models.upload',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/models:upload',
request_field='googleCloudAiplatformV1beta1UploadModelRequest',
request_type_name='AiplatformProjectsLocationsModelsUploadRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
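# Illustrative usage (hedged sketch): Upload registers a Model with Vertex AI
# and returns a long-running operation whose result carries the new model's
# resource name. The `displayName` field on Model and the other names below
# are assumptions for illustration.
#
#   model = messages.GoogleCloudAiplatformV1beta1Model(displayName='my-model')
#   op = client.projects_locations_models.Upload(
#       messages.AiplatformProjectsLocationsModelsUploadRequest(
#           parent='projects/my-project/locations/us-central1',
#           googleCloudAiplatformV1beta1UploadModelRequest=
#               messages.GoogleCloudAiplatformV1beta1UploadModelRequest(model=model)))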
class ProjectsLocationsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_operations resource."""
_NAME = 'projects_locations_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
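# Illustrative usage (hedged sketch): for callers that prefer client-side
# polling over the server-side Wait above, a simple loop over Get works for
# any of the Operations services in this file. `client`, `messages`, and
# `operation_name` are illustrative assumptions; `time` is from the standard
# library.
#
#   import time
#   service = client.projects_locations_operations
#   while True:
#     op = service.Get(
#         messages.AiplatformProjectsLocationsOperationsGetRequest(name=operation_name))
#     if op.done:
#       break
#     time.sleep(10)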
class ProjectsLocationsPipelineJobsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_pipelineJobs_operations resource."""
_NAME = 'projects_locations_pipelineJobs_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsPipelineJobsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsPipelineJobsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/pipelineJobs/{pipelineJobsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.pipelineJobs.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsPipelineJobsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsPipelineJobsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/pipelineJobs/{pipelineJobsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.pipelineJobs.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsPipelineJobsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsPipelineJobsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/pipelineJobs/{pipelineJobsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.pipelineJobs.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsPipelineJobsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsPipelineJobsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/pipelineJobs/{pipelineJobsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.pipelineJobs.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsPipelineJobsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsPipelineJobsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/pipelineJobs/{pipelineJobsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.pipelineJobs.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsPipelineJobsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsPipelineJobsService(base_api.BaseApiService):
"""Service class for the projects_locations_pipelineJobs resource."""
_NAME = 'projects_locations_pipelineJobs'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsPipelineJobsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Cancels a PipelineJob. Starts asynchronous cancellation on the PipelineJob. The server makes a best effort to cancel the pipeline, but success is not guaranteed. Clients can use PipelineService.GetPipelineJob or other methods to check whether the cancellation succeeded or whether the pipeline completed despite cancellation. On successful cancellation, the PipelineJob is not deleted; instead it becomes a pipeline with a PipelineJob.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`, and PipelineJob.state is set to `CANCELLED`.
Args:
request: (AiplatformProjectsLocationsPipelineJobsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/pipelineJobs/{pipelineJobsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.pipelineJobs.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='googleCloudAiplatformV1beta1CancelPipelineJobRequest',
request_type_name='AiplatformProjectsLocationsPipelineJobsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
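# Usage sketch (editorial): per the config above, Cancel wraps its body in the
# request_field `googleCloudAiplatformV1beta1CancelPipelineJobRequest`. Names
# below are illustrative; cancellation is best-effort, so follow up with Get
# to observe the resulting state.
#
#   job_name = 'projects/my-proj/locations/us-central1/pipelineJobs/run-1'
#   client.projects_locations_pipelineJobs.Cancel(
#       messages.AiplatformProjectsLocationsPipelineJobsCancelRequest(
#           name=job_name,
#           googleCloudAiplatformV1beta1CancelPipelineJobRequest=messages
#           .GoogleCloudAiplatformV1beta1CancelPipelineJobRequest()))
#   job = client.projects_locations_pipelineJobs.Get(
#       messages.AiplatformProjectsLocationsPipelineJobsGetRequest(name=job_name))
#   print(job.state)  # expected to reach CANCELLED if cancellation succeeds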
def Create(self, request, global_params=None):
r"""Creates a PipelineJob. A PipelineJob will run immediately when created.
Args:
request: (AiplatformProjectsLocationsPipelineJobsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1PipelineJob) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/pipelineJobs',
http_method='POST',
method_id='aiplatform.projects.locations.pipelineJobs.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['pipelineJobId'],
relative_path='v1beta1/{+parent}/pipelineJobs',
request_field='googleCloudAiplatformV1beta1PipelineJob',
request_type_name='AiplatformProjectsLocationsPipelineJobsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1PipelineJob',
supports_download=False,
)
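# Usage sketch (editorial): creating a PipelineJob with the optional
# `pipelineJobId` query parameter from the config above. The PipelineJob
# payload shown is deliberately minimal and illustrative; a real job also
# needs its pipeline spec.
#
#   job = client.projects_locations_pipelineJobs.Create(
#       messages.AiplatformProjectsLocationsPipelineJobsCreateRequest(
#           parent='projects/my-proj/locations/us-central1',
#           pipelineJobId='run-001',
#           googleCloudAiplatformV1beta1PipelineJob=messages
#           .GoogleCloudAiplatformV1beta1PipelineJob(displayName='run-001')))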
def Delete(self, request, global_params=None):
r"""Deletes a PipelineJob.
Args:
request: (AiplatformProjectsLocationsPipelineJobsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/pipelineJobs/{pipelineJobsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.pipelineJobs.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsPipelineJobsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a PipelineJob.
Args:
request: (AiplatformProjectsLocationsPipelineJobsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1PipelineJob) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/pipelineJobs/{pipelineJobsId}',
http_method='GET',
method_id='aiplatform.projects.locations.pipelineJobs.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsPipelineJobsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1PipelineJob',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists PipelineJobs in a Location.
Args:
request: (AiplatformProjectsLocationsPipelineJobsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListPipelineJobsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/pipelineJobs',
http_method='GET',
method_id='aiplatform.projects.locations.pipelineJobs.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+parent}/pipelineJobs',
request_field='',
request_type_name='AiplatformProjectsLocationsPipelineJobsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListPipelineJobsResponse',
supports_download=False,
)
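# Usage sketch (editorial): the List config above accepts `filter` and
# `orderBy` query parameters; the filter expression shown is an assumption
# about the service's filter grammar, not something this module defines.
#
#   resp = client.projects_locations_pipelineJobs.List(
#       messages.AiplatformProjectsLocationsPipelineJobsListRequest(
#           parent='projects/my-proj/locations/us-central1',
#           filter='state="PIPELINE_STATE_RUNNING"',
#           orderBy='create_time desc',
#           pageSize=50))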
class ProjectsLocationsSpecialistPoolsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_specialistPools_operations resource."""
_NAME = 'projects_locations_specialistPools_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsSpecialistPoolsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsSpecialistPoolsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/specialistPools/{specialistPoolsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.specialistPools.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsSpecialistPoolsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsSpecialistPoolsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/specialistPools/{specialistPoolsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.specialistPools.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsSpecialistPoolsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsSpecialistPoolsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/specialistPools/{specialistPoolsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.specialistPools.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsSpecialistPoolsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsSpecialistPoolsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/specialistPools/{specialistPoolsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.specialistPools.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsSpecialistPoolsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsSpecialistPoolsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/specialistPools/{specialistPoolsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.specialistPools.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsSpecialistPoolsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsSpecialistPoolsService(base_api.BaseApiService):
"""Service class for the projects_locations_specialistPools resource."""
_NAME = 'projects_locations_specialistPools'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsSpecialistPoolsService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a SpecialistPool.
Args:
request: (AiplatformProjectsLocationsSpecialistPoolsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/specialistPools',
http_method='POST',
method_id='aiplatform.projects.locations.specialistPools.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/specialistPools',
request_field='googleCloudAiplatformV1beta1SpecialistPool',
request_type_name='AiplatformProjectsLocationsSpecialistPoolsCreateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a SpecialistPool as well as all Specialists in the pool.
Args:
request: (AiplatformProjectsLocationsSpecialistPoolsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/specialistPools/{specialistPoolsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.specialistPools.delete',
ordered_params=['name'],
path_params=['name'],
query_params=['force'],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsSpecialistPoolsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
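# Usage sketch (editorial): Delete takes the `force` query parameter declared
# in the config above and returns a long-running operation rather than an
# immediate result; resource names are illustrative.
#
#   op = client.projects_locations_specialistPools.Delete(
#       messages.AiplatformProjectsLocationsSpecialistPoolsDeleteRequest(
#           name='projects/my-proj/locations/us-central1/specialistPools/pool-1',
#           force=True))  # force also removes Specialists in the pool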
def Get(self, request, global_params=None):
r"""Gets a SpecialistPool.
Args:
request: (AiplatformProjectsLocationsSpecialistPoolsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1SpecialistPool) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/specialistPools/{specialistPoolsId}',
http_method='GET',
method_id='aiplatform.projects.locations.specialistPools.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsSpecialistPoolsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1SpecialistPool',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists SpecialistPools in a Location.
Args:
request: (AiplatformProjectsLocationsSpecialistPoolsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListSpecialistPoolsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/specialistPools',
http_method='GET',
method_id='aiplatform.projects.locations.specialistPools.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/specialistPools',
request_field='',
request_type_name='AiplatformProjectsLocationsSpecialistPoolsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListSpecialistPoolsResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates a SpecialistPool.
Args:
request: (AiplatformProjectsLocationsSpecialistPoolsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/specialistPools/{specialistPoolsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.specialistPools.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1SpecialistPool',
request_type_name='AiplatformProjectsLocationsSpecialistPoolsPatchRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
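# Usage sketch (editorial): Patch pairs an `updateMask` query parameter with a
# SpecialistPool body in the request_field above. The `displayName` field is
# an assumption about the SpecialistPool message shape.
#
#   op = client.projects_locations_specialistPools.Patch(
#       messages.AiplatformProjectsLocationsSpecialistPoolsPatchRequest(
#           name='projects/my-proj/locations/us-central1/specialistPools/pool-1',
#           updateMask='displayName',
#           googleCloudAiplatformV1beta1SpecialistPool=messages
#           .GoogleCloudAiplatformV1beta1SpecialistPool(displayName='renamed')))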
class ProjectsLocationsStudiesOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_studies_operations resource."""
_NAME = 'projects_locations_studies_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsStudiesOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsStudiesOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.studies.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsStudiesOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.studies.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsStudiesOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.studies.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsStudiesOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.studies.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsStudiesOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.studies.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsStudiesTrialsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_studies_trials_operations resource."""
_NAME = 'projects_locations_studies_trials_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsStudiesTrialsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.studies.trials.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesTrialsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.studies.trials.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesTrialsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.studies.trials.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesTrialsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.studies.trials.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesTrialsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.studies.trials.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesTrialsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsStudiesTrialsService(base_api.BaseApiService):
"""Service class for the projects_locations_studies_trials resource."""
_NAME = 'projects_locations_studies_trials'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsStudiesTrialsService, self).__init__(client)
self._upload_configs = {
}
def AddTrialMeasurement(self, request, global_params=None):
r"""Adds a measurement of the objective metrics to a Trial. This measurement is assumed to have been taken before the Trial is complete.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsAddTrialMeasurementRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Trial) The response message.
"""
config = self.GetMethodConfig('AddTrialMeasurement')
return self._RunMethod(
config, request, global_params=global_params)
AddTrialMeasurement.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}:addTrialMeasurement',
http_method='POST',
method_id='aiplatform.projects.locations.studies.trials.addTrialMeasurement',
ordered_params=['trialName'],
path_params=['trialName'],
query_params=[],
relative_path='v1beta1/{+trialName}:addTrialMeasurement',
request_field='googleCloudAiplatformV1beta1AddTrialMeasurementRequest',
request_type_name='AiplatformProjectsLocationsStudiesTrialsAddTrialMeasurementRequest',
response_type_name='GoogleCloudAiplatformV1beta1Trial',
supports_download=False,
)
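# Usage sketch (editorial): note that the path parameter here is `trialName`,
# not `name`, per the config above. The inner measurement payload is elided
# because its message shape is not shown in this module.
#
#   trial = client.projects_locations_studies_trials.AddTrialMeasurement(
#       messages.AiplatformProjectsLocationsStudiesTrialsAddTrialMeasurementRequest(
#           trialName='projects/my-proj/locations/us-central1/studies/s1/trials/t1',
#           googleCloudAiplatformV1beta1AddTrialMeasurementRequest=messages
#           .GoogleCloudAiplatformV1beta1AddTrialMeasurementRequest()))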
def CheckTrialEarlyStoppingState(self, request, global_params=None):
r"""Checks whether a Trial should stop or not. Returns a long-running operation. When the operation is successful, it will contain a CheckTrialEarlyStoppingStateResponse.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsCheckTrialEarlyStoppingStateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('CheckTrialEarlyStoppingState')
return self._RunMethod(
config, request, global_params=global_params)
CheckTrialEarlyStoppingState.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}:checkTrialEarlyStoppingState',
http_method='POST',
method_id='aiplatform.projects.locations.studies.trials.checkTrialEarlyStoppingState',
ordered_params=['trialName'],
path_params=['trialName'],
query_params=[],
relative_path='v1beta1/{+trialName}:checkTrialEarlyStoppingState',
request_field='googleCloudAiplatformV1beta1CheckTrialEarlyStoppingStateRequest',
request_type_name='AiplatformProjectsLocationsStudiesTrialsCheckTrialEarlyStoppingStateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Complete(self, request, global_params=None):
r"""Marks a Trial as complete.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsCompleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Trial) The response message.
"""
config = self.GetMethodConfig('Complete')
return self._RunMethod(
config, request, global_params=global_params)
Complete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}:complete',
http_method='POST',
method_id='aiplatform.projects.locations.studies.trials.complete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:complete',
request_field='googleCloudAiplatformV1beta1CompleteTrialRequest',
request_type_name='AiplatformProjectsLocationsStudiesTrialsCompleteRequest',
response_type_name='GoogleCloudAiplatformV1beta1Trial',
supports_download=False,
)
def Create(self, request, global_params=None):
r"""Adds a user provided Trial to a Study.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Trial) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials',
http_method='POST',
method_id='aiplatform.projects.locations.studies.trials.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/trials',
request_field='googleCloudAiplatformV1beta1Trial',
request_type_name='AiplatformProjectsLocationsStudiesTrialsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1Trial',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a Trial.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.studies.trials.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesTrialsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a Trial.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Trial) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}',
http_method='GET',
method_id='aiplatform.projects.locations.studies.trials.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesTrialsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Trial',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists the Trials associated with a Study.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListTrialsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials',
http_method='GET',
method_id='aiplatform.projects.locations.studies.trials.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['pageSize', 'pageToken'],
relative_path='v1beta1/{+parent}/trials',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesTrialsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListTrialsResponse',
supports_download=False,
)
def ListOptimalTrials(self, request, global_params=None):
r"""Lists the pareto-optimal Trials for multi-objective Study or the optimal Trials for single-objective Study. The definition of pareto-optimal can be checked in wiki page. https://en.wikipedia.org/wiki/Pareto_efficiency.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsListOptimalTrialsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListOptimalTrialsResponse) The response message.
"""
config = self.GetMethodConfig('ListOptimalTrials')
return self._RunMethod(
config, request, global_params=global_params)
ListOptimalTrials.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials:listOptimalTrials',
http_method='POST',
method_id='aiplatform.projects.locations.studies.trials.listOptimalTrials',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/trials:listOptimalTrials',
request_field='googleCloudAiplatformV1beta1ListOptimalTrialsRequest',
request_type_name='AiplatformProjectsLocationsStudiesTrialsListOptimalTrialsRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListOptimalTrialsResponse',
supports_download=False,
)
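# Usage sketch (editorial): despite the name, ListOptimalTrials is a POST with
# an empty-bodied request message per the config above. The `optimalTrials`
# response field name is an assumption about the response message.
#
#   resp = client.projects_locations_studies_trials.ListOptimalTrials(
#       messages.AiplatformProjectsLocationsStudiesTrialsListOptimalTrialsRequest(
#           parent='projects/my-proj/locations/us-central1/studies/s1',
#           googleCloudAiplatformV1beta1ListOptimalTrialsRequest=messages
#           .GoogleCloudAiplatformV1beta1ListOptimalTrialsRequest()))
#   for trial in resp.optimalTrials or []:
#     print(trial.name)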
def Stop(self, request, global_params=None):
r"""Stops a Trial.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsStopRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Trial) The response message.
"""
config = self.GetMethodConfig('Stop')
return self._RunMethod(
config, request, global_params=global_params)
Stop.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials/{trialsId}:stop',
http_method='POST',
method_id='aiplatform.projects.locations.studies.trials.stop',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:stop',
request_field='googleCloudAiplatformV1beta1StopTrialRequest',
request_type_name='AiplatformProjectsLocationsStudiesTrialsStopRequest',
response_type_name='GoogleCloudAiplatformV1beta1Trial',
supports_download=False,
)
def Suggest(self, request, global_params=None):
r"""Adds one or more Trials to a Study, with parameter values suggested by Vertex AI Vizier. Returns a long-running operation associated with the generation of Trial suggestions. When this long-running operation succeeds, it will contain a SuggestTrialsResponse.
Args:
request: (AiplatformProjectsLocationsStudiesTrialsSuggestRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Suggest')
return self._RunMethod(
config, request, global_params=global_params)
Suggest.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}/trials:suggest',
http_method='POST',
method_id='aiplatform.projects.locations.studies.trials.suggest',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/trials:suggest',
request_field='googleCloudAiplatformV1beta1SuggestTrialsRequest',
request_type_name='AiplatformProjectsLocationsStudiesTrialsSuggestRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
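# Usage sketch (editorial): a minimal Vizier loop built from the methods above.
# Suggest returns a long-running operation whose result carries the suggested
# Trials; the `suggestionCount`/`clientId` field names are assumptions about
# the SuggestTrialsRequest message.
#
#   op = client.projects_locations_studies_trials.Suggest(
#       messages.AiplatformProjectsLocationsStudiesTrialsSuggestRequest(
#           parent='projects/my-proj/locations/us-central1/studies/s1',
#           googleCloudAiplatformV1beta1SuggestTrialsRequest=messages
#           .GoogleCloudAiplatformV1beta1SuggestTrialsRequest(
#               suggestionCount=3, clientId='worker-0')))
#   # ...poll the operation, evaluate each suggested Trial, report results via
#   # AddTrialMeasurement, then mark it done with Complete.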
class ProjectsLocationsStudiesService(base_api.BaseApiService):
"""Service class for the projects_locations_studies resource."""
_NAME = 'projects_locations_studies'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsStudiesService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a Study. A resource name will be generated after creation of the Study.
Args:
request: (AiplatformProjectsLocationsStudiesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Study) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies',
http_method='POST',
method_id='aiplatform.projects.locations.studies.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/studies',
request_field='googleCloudAiplatformV1beta1Study',
request_type_name='AiplatformProjectsLocationsStudiesCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1Study',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a Study.
Args:
request: (AiplatformProjectsLocationsStudiesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.studies.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a Study by name.
Args:
request: (AiplatformProjectsLocationsStudiesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Study) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies/{studiesId}',
http_method='GET',
method_id='aiplatform.projects.locations.studies.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Study',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists all the studies in a region for an associated project.
Args:
request: (AiplatformProjectsLocationsStudiesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListStudiesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies',
http_method='GET',
method_id='aiplatform.projects.locations.studies.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['pageSize', 'pageToken'],
relative_path='v1beta1/{+parent}/studies',
request_field='',
request_type_name='AiplatformProjectsLocationsStudiesListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListStudiesResponse',
supports_download=False,
)
def Lookup(self, request, global_params=None):
r"""Looks a study up using the user-defined display_name field instead of the fully qualified resource name.
Args:
request: (AiplatformProjectsLocationsStudiesLookupRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Study) The response message.
"""
config = self.GetMethodConfig('Lookup')
return self._RunMethod(
config, request, global_params=global_params)
Lookup.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/studies:lookup',
http_method='POST',
method_id='aiplatform.projects.locations.studies.lookup',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/studies:lookup',
request_field='googleCloudAiplatformV1beta1LookupStudyRequest',
request_type_name='AiplatformProjectsLocationsStudiesLookupRequest',
response_type_name='GoogleCloudAiplatformV1beta1Study',
supports_download=False,
)
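# Usage sketch (editorial): Lookup resolves a Study from its display name
# rather than its resource name. The `displayName` field on LookupStudyRequest
# is an assumption consistent with the docstring above.
#
#   study = client.projects_locations_studies.Lookup(
#       messages.AiplatformProjectsLocationsStudiesLookupRequest(
#           parent='projects/my-proj/locations/us-central1',
#           googleCloudAiplatformV1beta1LookupStudyRequest=messages
#           .GoogleCloudAiplatformV1beta1LookupStudyRequest(
#               displayName='my-study')))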
class ProjectsLocationsTensorboardsExperimentsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_tensorboards_experiments_operations resource."""
_NAME = 'projects_locations_tensorboards_experiments_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsTensorboardsExperimentsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.tensorboards.experiments.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
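    # --- Hypothetical usage sketch ---
    # Best-effort polling of a long-running operation via Wait, falling back
    # to Get-based polling. `client` and `messages` are illustrative names
    # for the instantiated client and its generated messages module.
    #
    #   svc = client.projects_locations_tensorboards_experiments_operations
    #   op = svc.Wait(
    #       messages.AiplatformProjectsLocationsTensorboardsExperimentsOperationsWaitRequest(
    #           name=operation_name, timeout='60s'))
    #   while not op.done:
    #       op = svc.Get(
    #           messages.AiplatformProjectsLocationsTensorboardsExperimentsOperationsGetRequest(
    #               name=operation_name))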
class ProjectsLocationsTensorboardsExperimentsRunsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_tensorboards_experiments_runs_operations resource."""
_NAME = 'projects_locations_tensorboards_experiments_runs_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsTensorboardsExperimentsRunsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_tensorboards_experiments_runs_timeSeries_operations resource."""
_NAME = 'projects_locations_tensorboards_experiments_runs_timeSeries_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesService(base_api.BaseApiService):
"""Service class for the projects_locations_tensorboards_experiments_runs_timeSeries resource."""
_NAME = 'projects_locations_tensorboards_experiments_runs_timeSeries'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsTensorboardsExperimentsRunsTimeSeriesService, self).__init__(client)
self._upload_configs = {
}
def BatchCreate(self, request, global_params=None):
r"""Batch create TensorboardTimeSeries that belong to a TensorboardExperiment.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesBatchCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1BatchCreateTensorboardTimeSeriesResponse) The response message.
"""
config = self.GetMethodConfig('BatchCreate')
return self._RunMethod(
config, request, global_params=global_params)
BatchCreate.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries:batchCreate',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.batchCreate',
ordered_params=['parent', 'runsId'],
path_params=['parent', 'runsId'],
query_params=[],
relative_path='v1beta1/{+parent}/runs/{runsId}/timeSeries:batchCreate',
request_field='googleCloudAiplatformV1beta1BatchCreateTensorboardTimeSeriesRequest',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesBatchCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1BatchCreateTensorboardTimeSeriesResponse',
supports_download=False,
)
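    # --- Hypothetical usage sketch ---
    # Note the extra `runsId` path parameter above: `parent` is the
    # TensorboardExperiment, while each CreateTensorboardTimeSeriesRequest in
    # the batch names its own run. The '-' wildcard for runsId is an
    # assumption borrowed from similar batch endpoints, not verified here.
    #
    #   batch = messages.GoogleCloudAiplatformV1beta1BatchCreateTensorboardTimeSeriesRequest(
    #       requests=[...])  # one entry per time series to create
    #   request = messages.AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesBatchCreateRequest(
    #       parent='projects/p/locations/l/tensorboards/t/experiments/e',
    #       runsId='-',
    #       googleCloudAiplatformV1beta1BatchCreateTensorboardTimeSeriesRequest=batch)
    #   response = client.projects_locations_tensorboards_experiments_runs_timeSeries.BatchCreate(request)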
def BatchRead(self, request, global_params=None):
r"""Reads multiple TensorboardTimeSeries' data. The data point number limit is 1000 for scalars, 100 for tensors and blob references. If the number of data points stored is less than the limit, all data will be returned. Otherwise, that limit number of data points will be randomly selected from this time series and returned.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesBatchReadRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1BatchReadTensorboardTimeSeriesDataResponse) The response message.
"""
config = self.GetMethodConfig('BatchRead')
return self._RunMethod(
config, request, global_params=global_params)
BatchRead.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries:batchRead',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.batchRead',
ordered_params=['tensorboard', 'experimentsId', 'runsId'],
path_params=['experimentsId', 'runsId', 'tensorboard'],
query_params=['timeSeries'],
relative_path='v1beta1/{+tensorboard}/experiments/{experimentsId}/runs/{runsId}/timeSeries:batchRead',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesBatchReadRequest',
response_type_name='GoogleCloudAiplatformV1beta1BatchReadTensorboardTimeSeriesDataResponse',
supports_download=False,
)
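    # --- Hypothetical usage sketch ---
    # BatchRead is a GET whose repeated `timeSeries` query parameter selects
    # the series to read, while `tensorboard`, `experimentsId`, and `runsId`
    # locate the shared ancestry. Resource names below are placeholders.
    #
    #   request = messages.AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesBatchReadRequest(
    #       tensorboard='projects/p/locations/l/tensorboards/t',
    #       experimentsId='e',
    #       runsId='r',
    #       timeSeries=['projects/p/locations/l/tensorboards/t/experiments/e/runs/r/timeSeries/ts1',
    #                   'projects/p/locations/l/tensorboards/t/experiments/e/runs/r/timeSeries/ts2'])
    #   response = client.projects_locations_tensorboards_experiments_runs_timeSeries.BatchRead(request)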
def Create(self, request, global_params=None):
r"""Creates a TensorboardTimeSeries.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1TensorboardTimeSeries) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['tensorboardTimeSeriesId'],
relative_path='v1beta1/{+parent}/timeSeries',
request_field='googleCloudAiplatformV1beta1TensorboardTimeSeries',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1TensorboardTimeSeries',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a TensorboardTimeSeries.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def ExportTensorboardTimeSeries(self, request, global_params=None):
r"""Exports a TensorboardTimeSeries' data. Data is returned in paginated responses.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesExportTensorboardTimeSeriesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ExportTensorboardTimeSeriesDataResponse) The response message.
"""
config = self.GetMethodConfig('ExportTensorboardTimeSeries')
return self._RunMethod(
config, request, global_params=global_params)
ExportTensorboardTimeSeries.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}:exportTensorboardTimeSeries',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.exportTensorboardTimeSeries',
ordered_params=['tensorboardTimeSeries'],
path_params=['tensorboardTimeSeries'],
query_params=[],
relative_path='v1beta1/{+tensorboardTimeSeries}:exportTensorboardTimeSeries',
request_field='googleCloudAiplatformV1beta1ExportTensorboardTimeSeriesDataRequest',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesExportTensorboardTimeSeriesRequest',
response_type_name='GoogleCloudAiplatformV1beta1ExportTensorboardTimeSeriesDataResponse',
supports_download=False,
)
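    # --- Hypothetical pagination sketch ---
    # Export carries its page size and token inside the POST body rather than
    # as query parameters. The field names used here (pageSize, pageToken,
    # nextPageToken, timeSeriesDataPoints) follow the usual List conventions
    # and are assumptions; `series_name` and `consume` are placeholders.
    #
    #   body = messages.GoogleCloudAiplatformV1beta1ExportTensorboardTimeSeriesDataRequest(
    #       pageSize=1000)
    #   while True:
    #       page = client.projects_locations_tensorboards_experiments_runs_timeSeries.ExportTensorboardTimeSeries(
    #           messages.AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesExportTensorboardTimeSeriesRequest(
    #               tensorboardTimeSeries=series_name,
    #               googleCloudAiplatformV1beta1ExportTensorboardTimeSeriesDataRequest=body))
    #       consume(page.timeSeriesDataPoints)
    #       if not page.nextPageToken:
    #           break
    #       body.pageToken = page.nextPageToken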
def Get(self, request, global_params=None):
r"""Gets a TensorboardTimeSeries.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1TensorboardTimeSeries) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1TensorboardTimeSeries',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists TensorboardTimeSeries in a Location.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListTensorboardTimeSeriesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/timeSeries',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListTensorboardTimeSeriesResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates a TensorboardTimeSeries.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1TensorboardTimeSeries) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1TensorboardTimeSeries',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1TensorboardTimeSeries',
supports_download=False,
)
def Read(self, request, global_params=None):
r"""Reads a TensorboardTimeSeries' data. By default, if the number of data points stored is less than 1000, all data will be returned. Otherwise, 1000 data points will be randomly selected from this time series and returned. This value can be changed by changing max_data_points, which can't be greater than 10k.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesReadRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ReadTensorboardTimeSeriesDataResponse) The response message.
"""
config = self.GetMethodConfig('Read')
return self._RunMethod(
config, request, global_params=global_params)
Read.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}:read',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.read',
ordered_params=['tensorboardTimeSeries'],
path_params=['tensorboardTimeSeries'],
query_params=['filter', 'maxDataPoints'],
relative_path='v1beta1/{+tensorboardTimeSeries}:read',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesReadRequest',
response_type_name='GoogleCloudAiplatformV1beta1ReadTensorboardTimeSeriesDataResponse',
supports_download=False,
)
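    # --- Hypothetical usage sketch ---
    # Read addresses the time series via the `tensorboardTimeSeries` resource
    # name and caps the random sample with maxDataPoints (at most 10,000 per
    # the docstring above). `series_name` is a placeholder.
    #
    #   request = messages.AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesReadRequest(
    #       tensorboardTimeSeries=series_name,
    #       maxDataPoints=5000)
    #   response = client.projects_locations_tensorboards_experiments_runs_timeSeries.Read(request)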
def ReadBlobData(self, request, global_params=None):
r"""Gets bytes of TensorboardBlobs. This is to allow reading blob data stored in consumer project's Cloud Storage bucket without users having to obtain Cloud Storage access permission.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesReadBlobDataRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ReadTensorboardBlobDataResponse) The response message.
"""
config = self.GetMethodConfig('ReadBlobData')
return self._RunMethod(
config, request, global_params=global_params)
ReadBlobData.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}/timeSeries/{timeSeriesId}:readBlobData',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.timeSeries.readBlobData',
ordered_params=['timeSeries'],
path_params=['timeSeries'],
query_params=['blobIds'],
relative_path='v1beta1/{+timeSeries}:readBlobData',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesReadBlobDataRequest',
response_type_name='GoogleCloudAiplatformV1beta1ReadTensorboardBlobDataResponse',
supports_download=False,
)
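    # --- Hypothetical usage sketch ---
    # Fetching blob payloads by id; `blobIds` is the repeated query parameter
    # declared above, and the ids themselves would come from previously read
    # blob-reference data points. Names below are illustrative.
    #
    #   request = messages.AiplatformProjectsLocationsTensorboardsExperimentsRunsTimeSeriesReadBlobDataRequest(
    #       timeSeries=series_name,
    #       blobIds=['blob-1', 'blob-2'])
    #   response = client.projects_locations_tensorboards_experiments_runs_timeSeries.ReadBlobData(request)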
class ProjectsLocationsTensorboardsExperimentsRunsService(base_api.BaseApiService):
"""Service class for the projects_locations_tensorboards_experiments_runs resource."""
_NAME = 'projects_locations_tensorboards_experiments_runs'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsTensorboardsExperimentsRunsService, self).__init__(client)
self._upload_configs = {
}
def BatchCreate(self, request, global_params=None):
r"""Batch create TensorboardRuns.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsBatchCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1BatchCreateTensorboardRunsResponse) The response message.
"""
config = self.GetMethodConfig('BatchCreate')
return self._RunMethod(
config, request, global_params=global_params)
BatchCreate.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs:batchCreate',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.batchCreate',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/runs:batchCreate',
request_field='googleCloudAiplatformV1beta1BatchCreateTensorboardRunsRequest',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsBatchCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1BatchCreateTensorboardRunsResponse',
supports_download=False,
)
def Create(self, request, global_params=None):
r"""Creates a TensorboardRun.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1TensorboardRun) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['tensorboardRunId'],
relative_path='v1beta1/{+parent}/runs',
request_field='googleCloudAiplatformV1beta1TensorboardRun',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1TensorboardRun',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a TensorboardRun.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a TensorboardRun.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1TensorboardRun) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1TensorboardRun',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists TensorboardRuns in a Location.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListTensorboardRunsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/runs',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListTensorboardRunsResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates a TensorboardRun.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsRunsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1TensorboardRun) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1TensorboardRun',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsRunsPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1TensorboardRun',
supports_download=False,
)
def Write(self, request, global_params=None):
r"""Write time series data points into multiple TensorboardTimeSeries under a TensorboardRun. If any data fail to be ingested, an error will be returned.
Args:
request: (GoogleCloudAiplatformV1beta1WriteTensorboardRunDataRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1WriteTensorboardRunDataResponse) The response message.
"""
config = self.GetMethodConfig('Write')
return self._RunMethod(
config, request, global_params=global_params)
Write.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}/runs/{runsId}:write',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.runs.write',
ordered_params=['tensorboardRun'],
path_params=['tensorboardRun'],
query_params=[],
relative_path='v1beta1/{+tensorboardRun}:write',
request_field='<request>',
request_type_name='GoogleCloudAiplatformV1beta1WriteTensorboardRunDataRequest',
response_type_name='GoogleCloudAiplatformV1beta1WriteTensorboardRunDataResponse',
supports_download=False,
)
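    # --- Hypothetical usage sketch ---
    # Unlike most methods here, request_field='<request>' (apitools'
    # request-is-body sentinel) means the WriteTensorboardRunDataRequest
    # message itself is sent as the HTTP body, so no wrapper request type is
    # built. `run_name` and the list contents are placeholders.
    #
    #   request = messages.GoogleCloudAiplatformV1beta1WriteTensorboardRunDataRequest(
    #       tensorboardRun=run_name,
    #       timeSeriesData=[...])  # points grouped per time series
    #   response = client.projects_locations_tensorboards_experiments_runs.Write(request)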
class ProjectsLocationsTensorboardsExperimentsService(base_api.BaseApiService):
"""Service class for the projects_locations_tensorboards_experiments resource."""
_NAME = 'projects_locations_tensorboards_experiments'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsTensorboardsExperimentsService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a TensorboardExperiment.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1TensorboardExperiment) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=['tensorboardExperimentId'],
relative_path='v1beta1/{+parent}/experiments',
request_field='googleCloudAiplatformV1beta1TensorboardExperiment',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1TensorboardExperiment',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a TensorboardExperiment.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.tensorboards.experiments.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a TensorboardExperiment.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1TensorboardExperiment) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1TensorboardExperiment',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists TensorboardExperiments in a Location.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListTensorboardExperimentsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.experiments.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/experiments',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListTensorboardExperimentsResponse',
supports_download=False,
)
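    # --- Hypothetical pagination sketch ---
    # apitools ships a list_pager helper that walks pageToken/nextPageToken
    # automatically; the response field name used here is an assumption.
    #
    #   from apitools.base.py import list_pager
    #   for experiment in list_pager.YieldFromList(
    #       client.projects_locations_tensorboards_experiments,
    #       messages.AiplatformProjectsLocationsTensorboardsExperimentsListRequest(
    #           parent='projects/p/locations/l/tensorboards/t'),
    #       field='tensorboardExperiments',
    #       batch_size_attribute='pageSize'):
    #     handle(experiment)  # `handle` is a placeholder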
def Patch(self, request, global_params=None):
r"""Updates a TensorboardExperiment.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1TensorboardExperiment) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.tensorboards.experiments.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1TensorboardExperiment',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsPatchRequest',
response_type_name='GoogleCloudAiplatformV1beta1TensorboardExperiment',
supports_download=False,
)
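    # --- Hypothetical usage sketch ---
    # Patch sends the resource as the body and names the fields to change via
    # the updateMask query parameter; the mask spelling is an assumption.
    #
    #   body = messages.GoogleCloudAiplatformV1beta1TensorboardExperiment(
    #       displayName='renamed experiment')
    #   request = messages.AiplatformProjectsLocationsTensorboardsExperimentsPatchRequest(
    #       name=experiment_name,
    #       updateMask='displayName',
    #       googleCloudAiplatformV1beta1TensorboardExperiment=body)
    #   response = client.projects_locations_tensorboards_experiments.Patch(request)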
def Write(self, request, global_params=None):
r"""Write time series data points of multiple TensorboardTimeSeries in multiple TensorboardRun's. If any data fail to be ingested, an error will be returned.
Args:
request: (AiplatformProjectsLocationsTensorboardsExperimentsWriteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1WriteTensorboardExperimentDataResponse) The response message.
"""
config = self.GetMethodConfig('Write')
return self._RunMethod(
config, request, global_params=global_params)
Write.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/experiments/{experimentsId}:write',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.experiments.write',
ordered_params=['tensorboardExperiment'],
path_params=['tensorboardExperiment'],
query_params=[],
relative_path='v1beta1/{+tensorboardExperiment}:write',
request_field='googleCloudAiplatformV1beta1WriteTensorboardExperimentDataRequest',
request_type_name='AiplatformProjectsLocationsTensorboardsExperimentsWriteRequest',
response_type_name='GoogleCloudAiplatformV1beta1WriteTensorboardExperimentDataResponse',
supports_download=False,
)
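    # --- Hypothetical usage sketch ---
    # Experiment-level writes fan out to runs: the body bundles one
    # WriteTensorboardRunDataRequest per run. Field spellings and
    # `experiment_name` are assumptions.
    #
    #   body = messages.GoogleCloudAiplatformV1beta1WriteTensorboardExperimentDataRequest(
    #       writeRunDataRequests=[...])  # one per TensorboardRun
    #   request = messages.AiplatformProjectsLocationsTensorboardsExperimentsWriteRequest(
    #       tensorboardExperiment=experiment_name,
    #       googleCloudAiplatformV1beta1WriteTensorboardExperimentDataRequest=body)
    #   response = client.projects_locations_tensorboards_experiments.Write(request)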
class ProjectsLocationsTensorboardsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_tensorboards_operations resource."""
_NAME = 'projects_locations_tensorboards_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsTensorboardsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsTensorboardsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsTensorboardsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.tensorboards.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsTensorboardsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsTensorboardsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsTensorboardsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
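# The Wait docstring above is explicit that a response is best-effort and is
# no guarantee the operation finished, so callers generally still poll Get.
# A hedged sketch (the service handle and request type are assumed from this
# generated module, not verified against a live client):
#
#   import time
#
#   def wait_until_done(ops_service, name, poll_seconds=10):
#       while True:
#           op = ops_service.Get(
#               AiplatformProjectsLocationsTensorboardsOperationsGetRequest(name=name))
#           if op.done:
#               return op
#           time.sleep(poll_seconds)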
class ProjectsLocationsTensorboardsService(base_api.BaseApiService):
"""Service class for the projects_locations_tensorboards resource."""
_NAME = 'projects_locations_tensorboards'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsTensorboardsService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a Tensorboard.
Args:
request: (AiplatformProjectsLocationsTensorboardsCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards',
http_method='POST',
method_id='aiplatform.projects.locations.tensorboards.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/tensorboards',
request_field='googleCloudAiplatformV1beta1Tensorboard',
request_type_name='AiplatformProjectsLocationsTensorboardsCreateRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a Tensorboard.
Args:
request: (AiplatformProjectsLocationsTensorboardsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.tensorboards.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a Tensorboard.
Args:
request: (AiplatformProjectsLocationsTensorboardsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1Tensorboard) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1Tensorboard',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists Tensorboards in a Location.
Args:
request: (AiplatformProjectsLocationsTensorboardsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListTensorboardsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards',
http_method='GET',
method_id='aiplatform.projects.locations.tensorboards.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'orderBy', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/tensorboards',
request_field='',
request_type_name='AiplatformProjectsLocationsTensorboardsListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListTensorboardsResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates a Tensorboard.
Args:
request: (AiplatformProjectsLocationsTensorboardsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/tensorboards/{tensorboardsId}',
http_method='PATCH',
method_id='aiplatform.projects.locations.tensorboards.patch',
ordered_params=['name'],
path_params=['name'],
query_params=['updateMask'],
relative_path='v1beta1/{+name}',
request_field='googleCloudAiplatformV1beta1Tensorboard',
request_type_name='AiplatformProjectsLocationsTensorboardsPatchRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
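# A hedged end-to-end sketch of this service (client construction and the
# snake_case service attribute name follow apitools conventions and are
# assumptions, not verified):
#
#   client = AiplatformV1beta1()
#   create_req = AiplatformProjectsLocationsTensorboardsCreateRequest(
#       parent='projects/my-project/locations/us-central1',
#       googleCloudAiplatformV1beta1Tensorboard=my_tensorboard_message)
#   operation = client.projects_locations_tensorboards.Create(create_req)
#
# Create returns a GoogleLongrunningOperation (see response_type_name above),
# so the Tensorboard itself exists only once that operation completes.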
class ProjectsLocationsTrainingPipelinesOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_trainingPipelines_operations resource."""
_NAME = 'projects_locations_trainingPipelines_operations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsTrainingPipelinesOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (AiplatformProjectsLocationsTrainingPipelinesOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/trainingPipelines/{trainingPipelinesId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.trainingPipelines.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='AiplatformProjectsLocationsTrainingPipelinesOperationsCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (AiplatformProjectsLocationsTrainingPipelinesOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/trainingPipelines/{trainingPipelinesId}/operations/{operationsId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.trainingPipelines.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTrainingPipelinesOperationsDeleteRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (AiplatformProjectsLocationsTrainingPipelinesOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/trainingPipelines/{trainingPipelinesId}/operations/{operationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.trainingPipelines.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTrainingPipelinesOperationsGetRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the `name` binding allows API services to override the binding to use different resource name schemes, such as `users/*/operations`. To override the binding, API services can add a binding such as `"/v1/{name=users/*}/operations"` to their service configuration. For backwards compatibility, the default name includes the operations collection id, however overriding users must ensure the name binding is the parent resource, without the operations collection id.
Args:
request: (AiplatformProjectsLocationsTrainingPipelinesOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/trainingPipelines/{trainingPipelinesId}/operations',
http_method='GET',
method_id='aiplatform.projects.locations.trainingPipelines.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='AiplatformProjectsLocationsTrainingPipelinesOperationsListRequest',
response_type_name='GoogleLongrunningListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (AiplatformProjectsLocationsTrainingPipelinesOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/trainingPipelines/{trainingPipelinesId}/operations/{operationsId}:wait',
http_method='POST',
method_id='aiplatform.projects.locations.trainingPipelines.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='AiplatformProjectsLocationsTrainingPipelinesOperationsWaitRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
class ProjectsLocationsTrainingPipelinesService(base_api.BaseApiService):
"""Service class for the projects_locations_trainingPipelines resource."""
_NAME = 'projects_locations_trainingPipelines'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsTrainingPipelinesService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Cancels a TrainingPipeline. Starts asynchronous cancellation on the TrainingPipeline. The server makes a best effort to cancel the pipeline, but success is not guaranteed. Clients can use PipelineService.GetTrainingPipeline or other methods to check whether the cancellation succeeded or whether the pipeline completed despite cancellation. On successful cancellation, the TrainingPipeline is not deleted; instead it becomes a pipeline with a TrainingPipeline.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`, and TrainingPipeline.state is set to `CANCELLED`.
Args:
request: (AiplatformProjectsLocationsTrainingPipelinesCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/trainingPipelines/{trainingPipelinesId}:cancel',
http_method='POST',
method_id='aiplatform.projects.locations.trainingPipelines.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='googleCloudAiplatformV1beta1CancelTrainingPipelineRequest',
request_type_name='AiplatformProjectsLocationsTrainingPipelinesCancelRequest',
response_type_name='GoogleProtobufEmpty',
supports_download=False,
)
def Create(self, request, global_params=None):
r"""Creates a TrainingPipeline. A created TrainingPipeline right away will be attempted to be run.
Args:
request: (AiplatformProjectsLocationsTrainingPipelinesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1TrainingPipeline) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/trainingPipelines',
http_method='POST',
method_id='aiplatform.projects.locations.trainingPipelines.create',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/trainingPipelines',
request_field='googleCloudAiplatformV1beta1TrainingPipeline',
request_type_name='AiplatformProjectsLocationsTrainingPipelinesCreateRequest',
response_type_name='GoogleCloudAiplatformV1beta1TrainingPipeline',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a TrainingPipeline.
Args:
request: (AiplatformProjectsLocationsTrainingPipelinesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleLongrunningOperation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/trainingPipelines/{trainingPipelinesId}',
http_method='DELETE',
method_id='aiplatform.projects.locations.trainingPipelines.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTrainingPipelinesDeleteRequest',
response_type_name='GoogleLongrunningOperation',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a TrainingPipeline.
Args:
request: (AiplatformProjectsLocationsTrainingPipelinesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1TrainingPipeline) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/trainingPipelines/{trainingPipelinesId}',
http_method='GET',
method_id='aiplatform.projects.locations.trainingPipelines.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsTrainingPipelinesGetRequest',
response_type_name='GoogleCloudAiplatformV1beta1TrainingPipeline',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists TrainingPipelines in a Location.
Args:
request: (AiplatformProjectsLocationsTrainingPipelinesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudAiplatformV1beta1ListTrainingPipelinesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/trainingPipelines',
http_method='GET',
method_id='aiplatform.projects.locations.trainingPipelines.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['filter', 'pageSize', 'pageToken', 'readMask'],
relative_path='v1beta1/{+parent}/trainingPipelines',
request_field='',
request_type_name='AiplatformProjectsLocationsTrainingPipelinesListRequest',
response_type_name='GoogleCloudAiplatformV1beta1ListTrainingPipelinesResponse',
supports_download=False,
)
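# The List method above pages via pageToken; a hedged pagination sketch
# (the response field names trainingPipelines / nextPageToken follow the
# usual list-method conventions and are assumptions here):
#
#   def iter_training_pipelines(service, parent):
#       token = None
#       while True:
#           resp = service.List(
#               AiplatformProjectsLocationsTrainingPipelinesListRequest(
#                   parent=parent, pageToken=token))
#           for pipeline in resp.trainingPipelines:
#               yield pipeline
#           token = resp.nextPageToken
#           if not token:
#               break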
class ProjectsLocationsService(base_api.BaseApiService):
"""Service class for the projects_locations resource."""
_NAME = 'projects_locations'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsLocationsService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
r"""Gets information about a location.
Args:
request: (AiplatformProjectsLocationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudLocationLocation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}',
http_method='GET',
method_id='aiplatform.projects.locations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='AiplatformProjectsLocationsGetRequest',
response_type_name='GoogleCloudLocationLocation',
supports_download=False,
)
def GetIamPolicy(self, request, global_params=None):
r"""Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy set.
Args:
request: (AiplatformProjectsLocationsGetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleIamV1Policy) The response message.
"""
config = self.GetMethodConfig('GetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/{locationsId1}:getIamPolicy',
http_method='GET',
method_id='aiplatform.projects.locations.getIamPolicy',
ordered_params=['resource'],
path_params=['resource'],
query_params=['options_requestedPolicyVersion'],
relative_path='v1beta1/{+resource}:getIamPolicy',
request_field='',
request_type_name='AiplatformProjectsLocationsGetIamPolicyRequest',
response_type_name='GoogleIamV1Policy',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists information about the supported locations for this service.
Args:
request: (AiplatformProjectsLocationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleCloudLocationListLocationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations',
http_method='GET',
method_id='aiplatform.projects.locations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/locations',
request_field='',
request_type_name='AiplatformProjectsLocationsListRequest',
response_type_name='GoogleCloudLocationListLocationsResponse',
supports_download=False,
)
def SetIamPolicy(self, request, global_params=None):
r"""Sets the access control policy on the specified resource. Replaces any existing policy. Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` errors.
Args:
request: (AiplatformProjectsLocationsSetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleIamV1Policy) The response message.
"""
config = self.GetMethodConfig('SetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/{locationsId1}:setIamPolicy',
http_method='POST',
method_id='aiplatform.projects.locations.setIamPolicy',
ordered_params=['resource'],
path_params=['resource'],
query_params=[],
relative_path='v1beta1/{+resource}:setIamPolicy',
request_field='googleIamV1SetIamPolicyRequest',
request_type_name='AiplatformProjectsLocationsSetIamPolicyRequest',
response_type_name='GoogleIamV1Policy',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
r"""Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization checking. This operation may "fail open" without warning.
Args:
request: (AiplatformProjectsLocationsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleIamV1TestIamPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/{locationsId1}:testIamPermissions',
http_method='POST',
method_id='aiplatform.projects.locations.testIamPermissions',
ordered_params=['resource'],
path_params=['resource'],
query_params=[],
relative_path='v1beta1/{+resource}:testIamPermissions',
request_field='googleIamV1TestIamPermissionsRequest',
request_type_name='AiplatformProjectsLocationsTestIamPermissionsRequest',
response_type_name='GoogleIamV1TestIamPermissionsResponse',
supports_download=False,
)
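# SetIamPolicy replaces the whole policy (per its docstring above), so the
# usual pattern is read-modify-write via GetIamPolicy first. A hedged sketch
# (the google.iam.v1 message field names are assumptions):
#
#   policy = service.GetIamPolicy(
#       AiplatformProjectsLocationsGetIamPolicyRequest(resource=resource))
#   policy.bindings.append(new_binding)
#   service.SetIamPolicy(AiplatformProjectsLocationsSetIamPolicyRequest(
#       resource=resource,
#       googleIamV1SetIamPolicyRequest=GoogleIamV1SetIamPolicyRequest(
#           policy=policy)))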
class ProjectsService(base_api.BaseApiService):
"""Service class for the projects resource."""
_NAME = 'projects'
def __init__(self, client):
super(AiplatformV1beta1.ProjectsService, self).__init__(client)
self._upload_configs = {
}
[numeric quality-signal columns for the preceding record omitted]

---- next record ----
hexsha: e0f6fc1758b37447dcb538fa7b9b21599d107f6f | size: 18,564 | ext: py | lang: Python
path: epm.py | repo: davetremblay/Encrypted-Password-Manager | head: 213d00570967645b7522cc8b74082352e8bea374 | licenses: ["MIT"]
stars: null | issues: null | forks: 1 (2019-07-26T12:27:17.000Z)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 24 11:56:29 2019
@author: DaveTremblay
"""
import ast
import os
import random
import sys
import pyAesCrypt
def random_password(length, strength):
uppercase = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
lowercase = uppercase.lower()
numerical = "0123456789"
symbology = "!#$%&()*+,-./:;<=>?@[\\]^_`{|}~"  # backslash escaped explicitly; "\]" is an invalid escape sequence
char_type = {
1: uppercase,
2: lowercase,
3: numerical,
4: symbology
}
password = ""
for n in range(length):
x = random.randint(1, strength)
y = random.randint(0, len(char_type[x])-1)  # start at 0 so the first character of each set ('A', 'a', '0', '!') can be drawn
character = char_type[x][y]
password += character
return password
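# random.Random is not designed for secrets; a minimal alternative sketch
# using the stdlib secrets module (a hypothetical helper, not called by
# this script):
import secrets

def random_password_secure(length, alphabet):
    # secrets.choice draws from the OS CSPRNG and covers every index of the
    # alphabet, avoiding both predictability and skipped characters.
    return "".join(secrets.choice(alphabet) for _ in range(length))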
def decrypt():
bufferSize = 64*1024
ok = False
while not ok:
try:
password = str(
input("Decrypting file...\nEnter main password to access encrypted passwords: "))
pyAesCrypt.decryptFile(
"password_list.txt.aes", "password_list.txt", password, bufferSize)
ok = True
print("File decrypted.")
except:
print("Invalid input (Wrong password or File corrupted).")
_main_()
def encrypt():
bufferSize = 64*1024
ok = False
while not ok:
try:
password = str(
input("Encrypting file...\nEnter new main password (!!!DON'T FORGET IT!!!): "))
if "'" in password or "\"" in password:
print("Please don't use ' or \". Try again.")
else:
password2 = str(input("Enter new main password again: "))
if password == password2:
pyAesCrypt.encryptFile(
"password_list.txt", "password_list.txt.aes", password, bufferSize)
ok = True
print("File encrypted.\nClearing and deleting password_list.txt...")
with open("password_list.txt", "w") as f:
f.write("")
f.close()
else:
print("Passwords don't match. Try again.")
except:
print("Invalid input (Wrong password or File corrupted).")
encrypt()
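# How the AES container round-trips with pyAesCrypt, as a self-contained
# sketch (file names are illustrative; this demo is not invoked anywhere):
def _pyaescrypt_roundtrip_demo():
    buffer_size = 64 * 1024
    pyAesCrypt.encryptFile("demo.txt", "demo.txt.aes", "passw0rd", buffer_size)
    # decryptFile raises ValueError on a wrong password or a corrupted
    # container, which is what the bare except above (too broadly) catches.
    pyAesCrypt.decryptFile("demo.txt.aes", "demo.txt", "passw0rd", buffer_size)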
def get_password_collection():
if not os.path.isfile("password_list.txt") and not os.path.isfile("password_list.txt.aes"):
password_dict = {}
with open("password_list.txt", "w") as f:
f.close()
else:
decrypt()
os.remove("password_list.txt.aes")
with open("password_list.txt", "r") as f:
password_dict = f.read()
try:
password_dict = ast.literal_eval("{"+password_dict+"}")
except:
password_dict = {}
return password_dict
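# The store is one "key: value" dict entry per line with the outer braces
# stripped, so re-wrapping the file contents in "{...}" yields a literal
# that ast.literal_eval can parse safely (it never executes code, unlike
# eval). A minimal illustration with a made-up entry:
def _store_format_demo():
    raw = "'github': ['github.com', 'dave', 'Xy3!'],\n"
    assert ast.literal_eval("{" + raw + "}") == {
        'github': ['github.com', 'dave', 'Xy3!']}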
def create_new_password(password_dict):
ok = False
while not ok:
try:
identifier = str(input("Identifier (unique value): "))
if identifier in password_dict:
print(
"Identifier already in use. Please add a new identifier or edit a previous one.")
encrypt()
_main_()
else:
ok = True
except:
print("Invalid input.")
ok = False
while not ok:
try:
website = str(input("Website name or url: "))
ok = True
except:
print("Invalid input.")
ok = False
while not ok:
try:
login = str(input("Nickname / Email address: "))
ok = True
except:
print("Invalid input.")
ok = False
while not ok:
try:
length = int(input("Desired password length: "))
if length > 0:
ok = True
else:
print("Invalid input.")
except:
print("Invalid input.")
ok = False
while not ok:
try:
strength = int(input(
"Strength\n1: Uppercase\n2: 1 + lowercase\n3: 2 + numbers\n4: 3 + special characters\n\nDesired password strength (1-4): "))
if strength > 0 and strength < 5:
ok = True
else:
print("Invalid input.")
except:
print("Invalid input.")
password = random_password(length, strength)
account = [website, login, password]
pass_line = {identifier: account}
with open("password_list.txt", "a") as f:
f.write(str(pass_line)[1:len(str(pass_line))-1] + ",\n")
f.close()
return [pass_line, identifier]
def manual_password(password_dict):
ok = False
while not ok:
try:
identifier = str(input("Identifier (unique value): "))
if identifier in password_dict:
print(
"Identifier already in use. Please add a new identifier or edit a previous one.")
encrypt()
_main_()
else:
ok = True
except:
print("Invalid input.")
ok = False
while not ok:
try:
website = str(input("Website name or url: "))
ok = True
except:
print("Invalid input.")
ok = False
while not ok:
try:
login = str(input("Nickname / Email address: "))
ok = True
except:
print("Invalid input.")
ok = False
while not ok:
try:
password = str(input("Enter password manually: "))
ok = True
except:
print("Invalid input.")
account = [website, login, password]
pass_line = {identifier: account}
with open("password_list.txt", "a") as f:
f.write(str(pass_line)[1:len(str(pass_line))-1] + ",\n")
f.close()
return [pass_line, identifier]
def edit_password(password_dict):
ok = False
while not ok:
try:
identifier = str(input("Identifier of what you want to edit: "))
website = password_dict[identifier][0]
ok = True
except:
print("Entry not found.")
encrypt()
_main_()
login = password_dict[identifier][1]
old_password = password_dict[identifier][2]
ok = False
while not ok:
try:
length = int(input("Desired new password length: "))
if length > 0:
ok = True
else:
print("Invalid input.")
except:
print("Invalid input.")
ok = False
while not ok:
try:
strength = int(input(
"Strength\n1: Uppercase\n2: 1 + lowercase\n3: 2 + numbers\n4: 3 + special characters\n\nDesired new password strength (1-4): "))
if strength > 0 and strength < 5:
ok = True
else:
print("Invalid input.")
except:
print("Invalid input.")
password = random_password(length, strength)
password_dict[identifier][2] = password
old_account = [website, login, old_password]
account = [website, login, password]
old_pass_line = {identifier: old_account}
pass_line = {identifier: account}
with open('password_list.txt', 'r') as f:
filedata = f.read()
filedata = filedata.replace(str(old_pass_line)[1:len(
str(old_pass_line))-1], str(pass_line)[1:len(str(pass_line))-1])
with open('password_list.txt', 'w') as f:
f.write(filedata)
f.close()
return [pass_line, identifier]
def edit_manual_password(password_dict):
ok = False
while not ok:
try:
identifier = str(input("Identifier of what you want to edit: "))
website = password_dict[identifier][0]
ok = True
except:
print("Entry not found.")
encrypt()
_main_()
login = password_dict[identifier][1]
old_password = password_dict[identifier][2]
ok = False
while not ok:
try:
password = str(input("Enter password manually: "))
ok = True
except:
print("Invalid input.")
password_dict[identifier][2] = password
old_account = [website, login, old_password]
account = [website, login, password]
old_pass_line = {identifier: old_account}
pass_line = {identifier: account}
with open('password_list.txt', 'r') as f:
filedata = f.read()
filedata = filedata.replace(str(old_pass_line)[1:len(
str(old_pass_line))-1], str(pass_line)[1:len(str(pass_line))-1])
with open('password_list.txt', 'w') as f:
f.write(filedata)
f.close()
return [pass_line, identifier]
def edit_nickname(password_dict):
ok = False
while not ok:
try:
identifier = str(input("Identifier of what you want to edit: "))
website = password_dict[identifier][0]
ok = True
except:
print("Entry not found.")
encrypt()
_main_()
old_login = password_dict[identifier][1]
password = password_dict[identifier][2]
ok = False
while not ok:
try:
login = str(input("New account name / email address: "))
ok = True
except:
print("Invalid input.")
password_dict[identifier][1] = login
old_account = [website, old_login, password]
account = [website, login, password]
old_pass_line = {identifier: old_account}
pass_line = {identifier: account}
with open('password_list.txt', 'r') as f:
filedata = f.read()
filedata = filedata.replace(str(old_pass_line)[1:len(
str(old_pass_line))-1], str(pass_line)[1:len(str(pass_line))-1])
with open('password_list.txt', 'w') as f:
f.write(filedata)
f.close()
return [pass_line, identifier]
def edit_website(password_dict):
ok = False
while not ok:
try:
identifier = str(input("Identifier of what you want to edit: "))
old_website = password_dict[identifier][0]
ok = True
except:
print("Entry not found.")
encrypt()
_main_()
login = password_dict[identifier][1]
password = password_dict[identifier][2]
ok = False
while not ok:
try:
website = str(input("New website name or url: "))
ok = True
except:
print("Invalid input.")
password_dict[identifier][0] = website
old_account = [old_website, login, password]
account = [website, login, password]
old_pass_line = {identifier: old_account}
pass_line = {identifier: account}
with open('password_list.txt', 'r') as f:
filedata = f.read()
filedata = filedata.replace(str(old_pass_line)[1:len(
str(old_pass_line))-1], str(pass_line)[1:len(str(pass_line))-1])
with open('password_list.txt', 'w') as f:
f.write(filedata)
f.close()
return [pass_line, identifier]
def delete_line(password_dict):
ok = False
while not ok:
try:
identifier = str(input("Identifier of what you want to delete: "))
del password_dict[identifier]
ok = True
except:
print("Entry not found.")
encrypt()
_main_()
with open('password_list.txt', 'w') as f:
f.write(str(password_dict)[1:len(str(password_dict))-1])
f.close()
def search_line(password_dict):
ok = False
while not ok:
try:
identifier = str(input("Identifier of what you want to search: "))
password_line = password_dict[identifier]
ok = True
except:
print("Entry not found.")
encrypt()
_main_()
return password_line
def _main_():
ok = False
while not ok:
try:
command = str(input(
"What do you want to do?\n(A)dd an entry\n(E)dit an entry\n(D)elete an entry\n(S)earch an entry\n(V)iew all entries\n(Q)uit\n\nEnter command: "))
if command.lower() in "aedsvq":
ok = True
else:
print("Invalid input.")
except:
print("Invalid input.")
if command.lower() == "a":
password_dict = get_password_collection()
ok = False
while not ok:
try:
manual = input("Enter password manually? (y/n): ")
if manual.lower() == "y" or manual.lower() == "n":
ok = True
else:
print("Invalid input.")
except:
print("Invalid input.")
if manual.lower() == "n":
new_password_list = create_new_password(password_dict)
new_password = new_password_list[0]
identifier = new_password_list[1]
password_dict.update(new_password)
print("Entry created!\nIdentifier (unique value): "+identifier+"\nWebsite name or url: " +
new_password[identifier][0]+"\nNickname or email address: "+new_password[identifier][1]+"\nPassword: "+new_password[identifier][2])
elif manual.lower() == "y":
new_password_list = manual_password(password_dict)
new_password = new_password_list[0]
identifier = new_password_list[1]
password_dict.update(new_password)
print("Entry created!\nIdentifier (unique value): "+identifier+"\nWebsite name or url: " +
new_password[identifier][0]+"\nNickname or email address: "+new_password[identifier][1]+"\nPassword: "+new_password[identifier][2])
elif command.lower() == "e":
if not os.path.isfile("password_list.txt.aes"):
print("*No entry to edit.*")
_main_()
else:
ok = False
while not ok:
try:
command_2 = str(input(
"What do you want to edit?\n(W)ebsite name or url\n(N)ickname or Email address\n(P)assword\n\nEdit: "))
if command_2.lower() in "wnp":
ok = True
else:
print("Invalid input.")
except:
print("Invalid input.")
if command_2.lower() == "p":
password_dict = get_password_collection()
ok = False
while not ok:
try:
manual = input("Enter password manually? (y/n): ")
if manual.lower() == "y" or manual.lower() == "n":
ok = True
else:
print("Invalid input.")
except:
print("Invalid input.")
if manual.lower() == "n":
edit_password_list = edit_password(password_dict)
new_password = edit_password_list[0]
identifier = edit_password_list[1]
password_dict.update(new_password)
print("Password edited!\nIdentifier (unique value): "+identifier+"\nWebsite name or url: " +
new_password[identifier][0]+"\nNickname or email address: "+new_password[identifier][1]+"\nPassword: "+new_password[identifier][2])
elif manual.lower() == "y":
edit_password_list = edit_manual_password(password_dict)
new_password = edit_password_list[0]
identifier = edit_password_list[1]
password_dict.update(new_password)
print("Password edited!\nIdentifier (unique value): "+identifier+"\nWebsite name or url: " +
new_password[identifier][0]+"\nNickname or email address: "+new_password[identifier][1]+"\nPassword: "+new_password[identifier][2])
elif command_2.lower() == "n":
password_dict = get_password_collection()
edit_nickname_list = edit_nickname(password_dict)
new_nickname = edit_nickname_list[0]
identifier = edit_nickname_list[1]
print("Nickname or email address edited!\nIdentifier (unique value): "+identifier+"\nWebsite name or url: " +
new_nickname[identifier][0]+"\nNickname or email address: "+new_nickname[identifier][1]+"\nPassword: "+new_nickname[identifier][2])
elif command_2.lower() == "w":
password_dict = get_password_collection()
edit_website_list = edit_website(password_dict)
new_website = edit_website_list[0]
identifier = edit_website_list[1]
print("Website name or url edited!\nIdentifier (unique value): "+identifier+"\nWebsite name or url: " +
new_website[identifier][0]+"\nNickname or email address: "+new_website[identifier][1]+"\nPassword: "+new_website[identifier][2])
elif command.lower() == "d":
if not os.path.isfile("password_list.txt.aes"):
print("*No entry to delete.*")
_main_()
else:
password_dict = get_password_collection()
delete_line(password_dict)
print("Entry deleted!")
elif command.lower() == "s":
if not os.path.isfile("password_list.txt.aes"):
print("*No entry to search.*")
_main_()
else:
password_dict = get_password_collection()
password_line = search_line(password_dict)
print("Website name or url: " +
password_line[0]+"\nNickname or email address: "+password_line[1]+"\nPassword: "+password_line[2])
elif command.lower() == "v":
if not os.path.isfile("password_list.txt.aes"):
print("*No entry to show.*")
_main_()
else:
password_dict = get_password_collection()
print(
"\n'Identifier': ['Website', 'Nickname / Email', 'Password']\n")
if len(password_dict) == 0:
print("*No entry to show.*")
else:
print(str(str(password_dict)[1:len(str(password_dict).replace(
"], ", "]\n"))-1].replace("], ", "]\n")+"]\n\n"))
elif command.lower() == "q":
sys.exit()  # sys.exit() itself raises SystemExit; a wrapping raise never runs
else:
print("Invalid input.")
_main_()
encrypt()
os.remove("password_list.txt")
print("password_list.txt cleared and deleted.")
_main_()
[numeric quality-signal columns for the preceding record omitted]

---- next record ----
hexsha: 460463142e21618ebea24bba23bb103526947813 | size: 197 | ext: py | lang: Python
path: src/wai/bynning/util/__init__.py | repo: waikato-datamining/bynning | head: 01b7368d4dc1094651d7cbe067576dfb3756a1d3 | licenses: ["MIT"]
stars: null | issues: null | forks: null
"""
Utility functions for binning.
"""
from ._conservatively_cache import conservatively_cache
from ._frequency_divide import frequency_divide
from ._integer_dot_product import integer_dot_product
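# These re-exports let callers import from the package root rather than the
# underscore-private submodules, e.g. (hedged, assuming this package layout):
#
#   from wai.bynning.util import frequency_divide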
[numeric quality-signal columns for the preceding record omitted]

---- next record ----
hexsha: e800e39cb8d38337c26f72f2b90511955f104d34 | size: 130 | ext: py | lang: Python
path: smtbx/refinement/constraints/geometrical/all.py | repo: dperl-sol/cctbx_project | head: b9e390221a2bc4fd00b9122e97c3b79c632c6664 | licenses: ["BSD-3-Clause-LBNL"]
stars: 155 (2016-11-23T12:52:16.000Z – 2022-03-31T15:35:44.000Z) | issues: 590 (2016-12-10T11:31:18.000Z – 2022-03-30T23:10:09.000Z) | forks: 115 (2016-11-15T08:17:28.000Z – 2022-02-09T15:30:14.000Z)
from __future__ import absolute_import, division, print_function
from smtbx.refinement.constraints.geometrical.hydrogens import *
[numeric quality-signal columns for the preceding record omitted]

---- next record ----
hexsha: e8213ab7dcb4bf3c584e4890ef6627bd294be6a4 | size: 2,276 | ext: py | lang: Python
path: src/onegov/election_day/screen_widgets/__init__.py | repo: politbuero-kampagnen/onegov-cloud | head: 20148bf321b71f617b64376fe7249b2b9b9c4aa9 | licenses: ["MIT"]
stars: null | issues: null | forks: null
from onegov.election_day.screen_widgets.election import (
ElectionCandidatesByEntityTableWidget,
ElectionCandidatesChartWidget,
ElectionCandidatesTableWidget,
ElectionCompoundCandidatesTableWidget,
ElectionCompoundDistrictsTableWidget,
ElectionCompoundListsChartWidget,
ElectionCompoundListsTableWidget,
ElectionListsChartWidget,
ElectionListsTableWidget,
)
from onegov.election_day.screen_widgets.generic import (
ColumnWidget,
CountedEntitiesWidget,
H1Widget,
H2Widget,
H3Widget,
HRWidget,
LogoWidget,
ProgressWidget,
RowWidget,
TextWidget,
TitleWidget,
)
from onegov.election_day.screen_widgets.vote import (
VoteCounterProposalDistrictsMap,
VoteCounterProposalEntitiesMap,
VoteCounterProposalEntitiesTableWidget,
VoteCounterProposalResultBarWidget,
VoteCounterProposalTitleWidget,
VoteProposalDistrictsMap,
VoteProposalEntitiesMap,
VoteProposalEntitiesTableWidget,
VoteProposalResultBarWidget,
VoteTieBreakerDistrictsMap,
VoteTieBreakerEntitiesMap,
VoteTieBreakerEntitiesTableWidget,
VoteTieBreakerResultBarWidget,
VoteTieBreakerTitleWidget
)
__all__ = (
'ColumnWidget',
'CountedEntitiesWidget',
'ElectionCandidatesByEntityTableWidget',
'ElectionCandidatesChartWidget',
'ElectionCandidatesTableWidget',
'ElectionCompoundCandidatesTableWidget',
'ElectionCompoundDistrictsTableWidget',
'ElectionCompoundListsChartWidget',
'ElectionCompoundListsTableWidget',
'ElectionListsChartWidget',
'ElectionListsTableWidget',
'H1Widget',
'H2Widget',
'H3Widget',
'HRWidget',
'LogoWidget',
'ProgressWidget',
'RowWidget',
'TextWidget',
'TitleWidget',
'VoteCounterProposalDistrictsMap',
'VoteCounterProposalEntitiesMap',
'VoteCounterProposalEntitiesTableWidget',
'VoteCounterProposalResultBarWidget',
'VoteCounterProposalTitleWidget',
'VoteProposalDistrictsMap',
'VoteProposalEntitiesMap',
'VoteProposalEntitiesTableWidget',
'VoteProposalResultBarWidget',
'VoteTieBreakerDistrictsMap',
'VoteTieBreakerEntitiesMap',
'VoteTieBreakerEntitiesTableWidget',
'VoteTieBreakerResultBarWidget',
'VoteTieBreakerTitleWidget',
)
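# __all__ pins the star-import surface: `from
# onegov.election_day.screen_widgets import *` binds exactly the names
# listed above, and each entry mirrors one of the explicit imports. A
# hedged one-line check (assuming the package is importable):
#
#   import onegov.election_day.screen_widgets as sw
#   assert set(sw.__all__) <= set(dir(sw))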
[numeric quality-signal columns for the preceding record omitted]

---- next record ----
hexsha: e831e82aa05bc2c2b2ab3114d3a0133b1f0fa9ef | size: 709 | ext: py | lang: Python
path: HelloWorld/summarychallenge.py | repo: zahraaliaghazadeh/python | head: 2f2d0141a916c99e8724f803bd4e5c7246a7a02e | licenses: ["MIT"]
stars: null | issues: null | forks: null
# print("Please choose your option from the list below:")
# print("1:\tLearn Python")
# print("2:\tLearn Java")
# print("3:\tGo swimming")
# print("4:\tHave dinner")
# print("5:\tGo to bed")
# print("0:\tExit")
choice = "-"
while choice != "0":
# while True:
# choice = input()
# if choice == "0":
# break
# elif choice in "12345":
if choice in "12345":
print("You chose {}".format(choice))
else:
print("Please choose your option from the list below:")
print("1:\tLearn Python")
print("2:\tLearn Java")
print("3:\tGo swimming")
print("4:\tHave dinner")
print("5:\tGo to bed")
print("0:\tExit")
choice = input()
[numeric quality-signal columns for the preceding record omitted]

---- next record ----
hexsha: e8381ae864c6696df51d62d705f5983be9bb12c7 | size: 31,752 | ext: py | lang: Python
path: pyNastran/op2/tables/oee_energy/oee_objects.py | repo: jtran10/pyNastran | head: 4aed8e05b91576c2b50ee835f0497a9aad1d2cb0 | licenses: ["BSD-3-Clause"]
stars: null | issues: null | forks: null
from __future__ import (nested_scopes, generators, division, absolute_import,
print_function, unicode_literals)
from six import integer_types, binary_type
import numpy as np
from pyNastran.op2.result_objects.op2_objects import ScalarObject
from pyNastran.f06.f06_formatting import _eigenvalue_header, write_float_13e
SORT2_TABLE_NAME_MAP = {
'ONRGY2' : 'ONRGY1',
}
class RealStrainEnergyArray(ScalarObject):
"""
::
E L E M E N T S T R A I N E N E R G I E S
ELEMENT-TYPE = QUAD4 * TOTAL ENERGY OF ALL ELEMENTS IN PROBLEM = 9.817708E+08
SUBCASE 1 * TOTAL ENERGY OF ALL ELEMENTS IN SET 1 = 4.192036E+08
ELEMENT-ID STRAIN-ENERGY PERCENT OF TOTAL STRAIN-ENERGY-DENSITY
12 2.291087E+07 2.3336 2.291087E+02
13 1.582968E+07 1.6124 1.055312E+02
14 6.576075E+07 6.6982 3.288037E+02
"""
def __init__(self, data_code, is_sort1, isubcase, dt):
self.element_type = None
self.element_name = None
ScalarObject.__init__(self, data_code, isubcase)
#self.code = [self.format_code, self.sort_code, self.s_code]
#self.ntimes = 0 # or frequency/mode
#self.ntotal = 0
self.nelements = 0 # result specific
self.itime = None
self.itotal2 = 0
#self.element_name_count = OrderedDict()
self.dt_temp = None
#if is_sort1:
#pass
#else:
#raise NotImplementedError('SORT2')
@property
def is_real(self):
return True
@property
def is_complex(self):
return False
def _reset_indices(self):
self.itotal = 0
self.ielement = 0
def get_headers(self):
headers = [
'strain_energy', 'percent', 'strain_energy_density'
]
return headers
def build(self):
"""sizes the vectorized attributes of the RealStrainEnergyArray"""
if self.is_built:
return
del self.dt_temp
#print(self._ntotals)
assert self.ntimes > 0, 'ntimes=%s' % self.ntimes
assert self.nelements > 0, 'nelements=%s' % self.nelements
assert self.ntotal > 0, 'ntotal=%s' % self.ntotal
self.ntotal = max(self._ntotals)
#if max(self._ntotals) != min(self._ntotals):
#raise RuntimeError('variable length in RealStrainEnergyArray')
#self.names = []
#self.nelements = self.ntotal // self.ntimes
self.nelements = self.ntotal
self.itime = 0
self.ielement = 0
self.itotal = 0
#self.itotal2 = 0
#self.ntimes = 0
#self.nelements = 0
self.is_built = True
#print("ntimes=%s nelements=%s ntotal=%s" % (self.ntimes, self.nelements, self.ntotal))
dtype = 'float32'
if isinstance(self.nonlinear_factor, integer_types):
dtype = 'int32'
self.build_data(dtype)
def build_data(self, dtype):
"""actually performs the build step"""
self._times = np.zeros(self.ntimes, dtype=dtype)
#self.element = zeros(self.nelements, dtype='int32')
#if dtype in 'DMIG':
#print(self.element_name, self.element_type)
if self.element_name == 'DMIG':
self.element = np.zeros((self.ntimes, self.nelements), dtype='|U8')
else:
self.element = np.zeros((self.ntimes, self.nelements), dtype='int32')
#self.element_data_type = empty(self.nelements, dtype='|U8')
#[energy, percent, density]
assert isinstance(self.ntimes, integer_types), self.ntimes
assert isinstance(self.ntotal, integer_types), self.ntotal
self.data = np.zeros((self.ntimes, self.nelements, 3), dtype='float32')
def build_dataframe(self):
"""
major-axis - the axis
mode 1 2 3
freq 1.0 2.0 3.0
ElementID Item
1 T1
T2
...
major_axis / top = [
[1, 2, 3],
[1.0, 2.0, 3.0]
]
minor_axis / headers = [ese, %, sed]
name = mode
"""
import pandas as pd
#print(''.join(self.get_stats()))
#print(self.element)
#print(self.data)
headers = self.get_headers()
ntimes = self.element.shape[0]
nelements = self.element.shape[1]
element = self.element.ravel()
if element.dtype is np.dtype(np.int32):
compare = 0
else:
# unicode
#value = value.tolist()
element = np.asarray(element, dtype='|U8')
compare = ''
#print('ntimes=%s' % ntimes)
if ntimes == 1:
column_names, column_values = self._build_dataframe_transient_header()
self.data_frame = pd.Panel(self.data, items=column_values,
major_axis=element,
minor_axis=headers).to_frame()
self.data_frame.columns.names = column_names
else:
# we can get into this in a linear case
# F:\work\pyNastran\examples\Dropbox\move_tpl\setp04.op2
nvalues = ntimes * nelements
#if self.nonlinear_factor not in (None, np.nan):
column_names, column_values = self._build_dataframe_transient_header()
#column_names = column_names[0]
#column_values = column_values[0]
column_values2 = []
for value in column_values:
values2 = []
for valuei in value:
values = np.ones(nelements) * valuei
values2.append(values)
values3 = np.vstack(values2).ravel()
column_values2.append(values3)
df1 = pd.DataFrame(column_values2).T
df1.columns = column_names
df2 = pd.DataFrame(element)
df2.columns = ['ElementID']
dfs = [df2]
for i, header in enumerate(headers):
df = pd.DataFrame(self.data[:, :, i].ravel())
df.columns = [header]
dfs.append(df)
self.data_frame = df1.join(dfs)
try:
self.data_frame.columns.names = column_names
except ValueError:
#print('headers =', headers)
print('self.cannot apply column_names=%s to RealStrainEnergyArray: %r' % (
column_names, self.element_name))
# remove empty rows
assert self.data_frame is not None
self.data_frame = self.data_frame[self.data_frame.ElementID != compare]
def __eq__(self, table):
return self.assert_equal(table)
def assert_equal(self, table, rtol=1.e-5, atol=1.e-8):
self._eq_header(table)
assert self.is_sort1 == table.is_sort1
if not np.array_equal(self.element, table.element):
assert self.element.shape == table.element.shape, 'element shape=%s table.shape=%s' % (self.element.shape, table.element.shape)
msg = 'table_name=%r class_name=%s\n' % (self.table_name, self.__class__.__name__)
msg += '%s\n' % str(self.code_information())
msg += 'itime: eid1 eid2\n'
i = 0
for itime in range(self.ntimes):
for eid1, eid2 in zip(self.element[itime, :], table.element[itime, :]):
msg += '%s: %s %s\n' % (itime, eid1, eid2)
if eid1 != eid2 and np.isnan(eid1):
i += 1
if i > 10:
print(msg)
raise ValueError(msg)
if i > 0:
raise ValueError(msg)
if not np.array_equal(self.data, table.data):
msg = 'table_name=%r class_name=%s\n' % (self.table_name, self.__class__.__name__)
msg += '%s\n' % str(self.code_information())
i = 0
for itime in range(self.ntimes):
for ie, eid in enumerate(self.element[itime, :]):
t1 = self.data[itime, ie, :]
t2 = table.data[itime, ie, :]
(energyi1, percenti1, densityi1) = t1
(energyi2, percenti2, densityi2) = t2
if np.isnan(densityi1) or not np.isfinite(densityi1):
if not np.array_equal(t1[:2], t2[:2]):
msg += (
'%s (%s, %s)\n'
'%s (%s, %s)\n' % (
eid, energyi1, percenti1,
' ' * len(str(eid)),
energyi2, percenti2,
))
i += 1
if i > 10:
print(msg)
raise ValueError(msg)
elif not np.array_equal(t1, t2):
msg += (
'%s (%s, %s, %s)\n'
'%s (%s, %s, %s)\n' % (
eid, energyi1, percenti1, densityi1,
' ' * len(str(eid)),
energyi2, percenti2, densityi2,
))
i += 1
if i > 10:
print(msg)
raise ValueError(msg)
#print(msg)
if i > 0:
raise ValueError(msg)
return True
def add_sort1(self, dt, eid, energyi, percenti, densityi):
"""unvectorized method for adding SORT1 transient data"""
#itime = self.itime // self.nelement_types
assert (isinstance(eid, int) and eid > 0) or isinstance(eid, binary_type), 'dt=%s eid=%s' % (dt, eid)
itime = self.itime
self._times[itime] = dt
self.element[itime, self.ielement] = eid
if self.element_name == 'DMIG':
if not np.isnan(densityi):
raise RuntimeError(
'RealStrainEnergyArray: itime=%s ielement=%s; '
'dt=%s eid=%s energyi=%s percenti=%s densityi=%s' % (
self.itime, self.ielement, dt, eid, energyi, percenti, densityi))
self.data[itime, self.ielement, :] = [energyi, percenti, np.nan]
else:
try:
#self.element_data_type[self.ielement] = etype
self.data[itime, self.ielement, :] = [energyi, percenti, densityi]
except (ValueError, IndexError):
print('RealStrainEnergyArray: itime=%s ielement=%s; '
'dt=%s eid=%s energyi=%s percenti=%s densityi=%s' % (
self.itime, self.ielement, dt, eid, energyi, percenti, densityi))
raise
self.ielement += 1
self.itotal += 1
def finalize(self):
self.set_as_sort1()
def set_as_sort1(self):
"""changes the table into SORT1"""
if self.is_sort1:
return
try:
analysis_method = self.analysis_method
except AttributeError:
print(self.code_information())
raise
#print(self.get_stats())
#print(self.node_gridtype)
#print(self.data.shape)
#aaa
self.sort_method = 1
self.sort_bits[1] = 0
bit0, bit1, bit2 = self.sort_bits
self.table_name = SORT2_TABLE_NAME_MAP[self.table_name]
self.sort_code = bit0 + 2*bit1 + 4*bit2
#print(self.code_information())
assert self.is_sort1
if analysis_method != 'N/A':
self.data_names[0] = analysis_method
#print(self.table_name_str, analysis_method, self._times)
setattr(self, self.analysis_method + 's', self._times)
del self.analysis_method
def get_stats(self, short=False):
if not self.is_built:
return [
'<%s>\n' % self.__class__.__name__,
' ntimes: %i\n' % self.ntimes,
' ntotal: %i\n' % self.ntotal,
]
nelements = self.nelements
ntimes = self.ntimes
#ntotal = self.ntotal
msg = []
if self.nonlinear_factor not in (None, np.nan): # transient
msg.append(' type=%s element_name=%r ntimes=%i nelements=%i\n'
% (self.__class__.__name__, self.element_name, ntimes, nelements))
ntimes_word = 'ntimes'
else:
msg.append(' type=%s element_name=%r nelements=%i\n'
% (self.__class__.__name__, self.element_name, nelements))
ntimes_word = '1'
headers = self.get_headers()
n = len(headers)
msg.append(' element: [%s, nelements]; eid=100000000 -> total\n' % (ntimes_word))
msg.append(' data: [%s, nelements, %i] where %i=[%s]\n' % (ntimes_word, n, n, str(', '.join(headers))))
msg.append(' data.shape = %s\n' % str(self.data.shape).replace('L', ''))
#msg.append(' element type: %s\n' % self.element_type)
#msg.append(' element name: %s\n' % self.element_name)
msg += self.get_data_code()
return msg
def write_f06(self, f06_file, header=None, page_stamp='PAGE %s',
page_num=1, is_mag_phase=False, is_sort1=True):
if header is None:
header = []
# ' EIGENVALUE = 2.005177E+05'
# ' CYCLES = 7.126832E+01'
# ' E L E M E N T S T R A I N E N E R G I E S'
# ' '
# ' ELEMENT-TYPE = TETRA * TOTAL ENERGY OF ALL ELEMENTS IN PROBLEM = 1.002589E+05'
# ' MODE 1 * TOTAL ENERGY OF ALL ELEMENTS IN SET -1 = 1.002589E+05'
# '0'
# ' ELEMENT-ID STRAIN-ENERGY PERCENT OF TOTAL STRAIN-ENERGY-DENSITY'
# ' 4 3.247409E+00 0.0032 1.948445E+01'
# ' 5 3.977916E+00 0.0040 2.386749E+01'
# ''
# ' TYPE = TETRA SUBTOTAL 7.225325E+00 0.0072'
msg_temp = (
' E L E M E N T S T R A I N E N E R G I E S\n'
' \n'
' ELEMENT-TYPE = %s * TOTAL ENERGY OF ALL ELEMENTS IN PROBLEM = %s\n'
' MODE %8i * TOTAL ENERGY OF ALL ELEMENTS IN SET -1 = %s\n'
'0\n'
' ELEMENT-ID STRAIN-ENERGY PERCENT OF TOTAL STRAIN-ENERGY-DENSITY\n'
)
ntimes = self.data.shape[0]
#etype = self.element_data_type
for itime in range(ntimes):
dt = self._times[itime] # TODO: rename this...
header = _eigenvalue_header(self, header, itime, ntimes, dt)
total_energy = 0.
total_set_energy = 0.
eids = self.element[itime, :]
# energy, percent, density
energy = self.data[itime, :, 0]
percent = self.data[itime, :, 1]
density = self.data[itime, :, 2]
#itotal = np.where(eids == 100000000)[0][0]
#total_energy = self.data[:, :, 0].sum()
#total_set_energy = energy.sum()
#total_set_energy = energy[itotal]
#total_percent = percent.sum()
msg_temp2 = [msg_temp % (self.element_name, total_energy, itime + 1, total_set_energy)]
f06_file.write(''.join(header + msg_temp2))
fmt1 = ' ' * 36 + '%10s %-13s %.4f %s\n'
fmt1_nan = ' ' * 36 + '%10s %-13s %.4f %s\n'
fmt2 = '\n TYPE = %-8s SUBTOTAL %13s %.4f\n'
for (eid, energyi, percenti, densityi) in zip(eids, energy, percent, density):
senergyi = write_float_13e(energyi)
sdensityi = write_float_13e(densityi)
# ELEMENT-ID STRAIN-ENERGY PERCENT OF TOTAL STRAIN-ENERGY-DENSITY
# 1 -8.307121E-12 0.0052 -2.886861E-12
if eid == 100000000:
f06_file.write(fmt2 % (self.element_name, senergyi, percenti))
break
try:
f06_file.write(fmt1 % (eid, senergyi, percenti, sdensityi))
except TypeError:
#print('eid = %r; type=%s' % (eid, type(eid)))
#print('senergyi = %r; type=%s' % (senergyi, type(senergyi)))
#print('percenti = %r; type=%s' % (percenti, type(percenti)))
#print('sdensityi = %r; type=%s' % (sdensityi, type(sdensityi)))
                    assert np.isnan(densityi), 'eid=%s densityi=%s' % (eid, densityi)
f06_file.write(fmt1_nan % (eid, senergyi, percenti, ''))
#if 0:
#print('senergyi = %r; type=%s' % (senergyi, type(senergyi)))
#print('percenti = %r; type=%s' % (percenti, type(percenti)))
#print('sdensityi = %r; type=%s' % (sdensityi, type(sdensityi)))
#msg = fmt1 % (eid, senergyi, percenti, sdensityi)
#raise TypeError(msg)
#raise RuntimeError(msg)
f06_file.write(page_stamp % page_num)
page_num += 1
break
return page_num - 1
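# A hypothetical usage sketch for the F06 writer above (the file name and result
# object are assumptions, not part of this module):
#   with open('model.f06', 'w') as f06_file:
#       page_num = obj.write_f06(f06_file, page_stamp='PAGE %s', page_num=1)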
class ComplexStrainEnergyArray(ScalarObject):
"""
::
FREQUENCY = 2.000000E+03
E L E M E N T S T R A I N E N E R G I E S ( A V E R A G E )
ELEMENT-TYPE = QUAD4 * TOTAL ENERGY OF ALL ELEMENTS IN PROBLEM = 1.611784E-08
SUBCASE 1 * TOTAL ENERGY OF ALL ELEMENTS IN SET -1 = 1.611784E-08
0
ELEMENT-ID STRAIN-ENERGY (MAG/PHASE) PERCENT OF TOTAL STRAIN-ENERGY-DENSITY
5 2.027844E-10 / 0.0 1.2581 2.027844E-09
"""
def __init__(self, data_code, is_sort1, isubcase, dt):
self.element_type = None
self.element_name = None
ScalarObject.__init__(self, data_code, isubcase)
#self.code = [self.format_code, self.sort_code, self.s_code]
#self.ntimes = 0 # or frequency/mode
#self.ntotal = 0
self.nelements = 0 # result specific
self.itime = None
self.itotal2 = 0
#self.element_name_count = OrderedDict()
self.dt_temp = None
if is_sort1:
pass
else:
raise NotImplementedError('SORT2')
@property
def is_real(self):
return False
@property
def is_complex(self):
return True
def _reset_indices(self):
self.itotal = 0
self.ielement = 0
def get_headers(self):
headers = [
'strain_energy', 'percent', 'strain_energy_density'
]
return headers
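    # NOTE: get_headers lists three physical quantities, but the data array built in
    # build_data carries four columns because the energy is stored as (real, imag).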
def build(self):
"""sizes the vectorized attributes of the ComplexStrainEnergyArray"""
if self.is_built:
return
del self.dt_temp
#print(self._ntotals)
assert self.ntimes > 0, 'ntimes=%s' % self.ntimes
assert self.nelements > 0, 'nelements=%s' % self.nelements
assert self.ntotal > 0, 'ntotal=%s' % self.ntotal
self.ntotal = max(self._ntotals)
#if max(self._ntotals) != min(self._ntotals):
#raise RuntimeError('variable length in RealStrainEnergyArray')
#self.names = []
#self.nelements = self.ntotal // self.ntimes
self.nelements = self.ntotal
self.itime = 0
self.ielement = 0
self.itotal = 0
#self.itotal2 = 0
#self.ntimes = 0
#self.nelements = 0
self.is_built = True
#print("ntimes=%s nelements=%s ntotal=%s" % (self.ntimes, self.nelements, self.ntotal))
dtype = 'float32'
if isinstance(self.nonlinear_factor, integer_types):
dtype = 'int32'
self.build_data(dtype)
def build_data(self, dtype):
"""actually performs the build step"""
self._times = np.zeros(self.ntimes, dtype=dtype)
#self.element = np.zeros(self.nelements, dtype='int32')
self.element = np.zeros((self.ntimes, self.nelements), dtype='int32')
#self.element_data_type = empty(self.nelements, dtype='|U8')
        #[energy_real, energy_imag, percent, density]
assert isinstance(self.ntimes, integer_types), self.ntimes
assert isinstance(self.ntotal, integer_types), self.ntotal
self.data = np.zeros((self.ntimes, self.nelements, 4), dtype='float32')
#def build_dataframe(self):
#"""
#major-axis - the axis
#mode 1 2 3
#freq 1.0 2.0 3.0
#ElementID Item
#1 T1
#T2
#...
#major_axis / top = [
#[1, 2, 3],
#[1.0, 2.0, 3.0]
#]
#minor_axis / headers = [ese, %, sed]
#name = mode
#"""
#import pandas as pd
#headers = self.get_headers()
#ntimes = self.element.shape[0]
#nelements = self.element.shape[1]
#if ntimes == 1:
#column_names, column_values = self._build_dataframe_transient_header()
#element = self.element.ravel()
#self.data_frame = pd.Panel(self.data, items=column_values,
#major_axis=element,
#minor_axis=headers).to_frame()
#self.data_frame.columns.names = column_names
#else:
#nvalues = ntimes * nelements
#element = self.element.ravel()
#if self.nonlinear_factor not in (None, np.nan):
#column_names, column_values = self._build_dataframe_transient_header()
##column_names = column_names[0]
##column_values = column_values[0]
#column_values2 = []
#for value in column_values:
#values2 = []
#for valuei in value:
#values = np.ones(nelements) * valuei
#values2.append(values)
#values3 = np.vstack(values2).ravel()
#column_values2.append(values3)
#df1 = pd.DataFrame(column_values2).T
#df1.columns = column_names
#df2 = pd.DataFrame(element)
#df2.columns = ['ElementID']
#dfs = [df2]
#for i, header in enumerate(headers):
#df = pd.DataFrame(self.data[:, :, i].ravel())
#df.columns = [header]
#dfs.append(df)
#self.data_frame = df1.join(dfs)
##self.data_frame.columns.names = column_names
## remove empty rows
#self.data_frame = self.data_frame[self.data_frame.ElementID != 0]
def __eq__(self, table):
return self.assert_equal(table)
def assert_equal(self, table, rtol=1.e-5, atol=1.e-8):
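        # NOTE: rtol/atol are accepted for API symmetry, but the comparisons below
        # are exact (np.array_equal); the tolerances are currently unused.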
self._eq_header(table)
assert self.is_sort1 == table.is_sort1
if not np.array_equal(self.element, table.element):
assert self.element.shape == table.element.shape, 'element shape=%s table.shape=%s' % (self.element.shape, table.element.shape)
msg = 'table_name=%r class_name=%s\n' % (self.table_name, self.__class__.__name__)
msg += '%s\n' % str(self.code_information())
msg += 'itime: eid1 eid2\n'
i = 0
for itime in range(self.ntimes):
for eid1, eid2 in zip(self.element[itime, :], table.element[itime, :]):
msg += '%s: %s %s\n' % (itime, eid1, eid2)
                    if eid1 != eid2:
i += 1
if i > 10:
print(msg)
raise ValueError(msg)
if i > 0:
raise ValueError(msg)
if not np.array_equal(self.data, table.data):
msg = 'table_name=%r class_name=%s\n' % (self.table_name, self.__class__.__name__)
msg += '%s\n' % str(self.code_information())
i = 0
for itime in range(self.ntimes):
for ie, eid in enumerate(self.element[itime, :]):
t1 = self.data[itime, ie, :]
t2 = table.data[itime, ie, :]
                    (energyi1r, energyi1i, percenti1, densityi1) = t1
                    (energyi2r, energyi2i, percenti2, densityi2) = t2
                    if np.isnan(densityi1) or not np.isfinite(densityi1):
                        if not np.array_equal(t1[:2], t2[:2]):
                            msg += (
                                '%s (%s+%si, %s)\n'
                                '%s (%s+%si, %s)\n' % (
                                    eid, energyi1r, energyi1i, percenti1,
                                    ' ' * len(str(eid)),
                                    energyi2r, energyi2i, percenti2,
                                ))
                            i += 1
                            if i > 10:
                                print(msg)
                                raise ValueError(msg)
                    elif not np.array_equal(t1, t2):
                        msg += (
                            '%s (%s+%si, %s, %s)\n'
                            '%s (%s+%si, %s, %s)\n' % (
                                eid, energyi1r, energyi1i, percenti1, densityi1,
                                ' ' * len(str(eid)),
                                energyi2r, energyi2i, percenti2, densityi2,
                            ))
i += 1
if i > 10:
print(msg)
raise ValueError(msg)
#print(msg)
if i > 0:
raise ValueError(msg)
return True
def add_sort1(self, dt, eid, energyr, energyi, percenti, densityi):
"""unvectorized method for adding SORT1 transient data"""
#itime = self.itime // self.nelement_types
assert isinstance(eid, (int, np.int32)) and eid > 0, 'dt=%s eid=%s' % (dt, eid)
itime = self.itime
self._times[itime] = dt
try:
self.element[itime, self.ielement] = eid
#self.element_data_type[self.ielement] = etype
self.data[itime, self.ielement, :] = [energyr, energyi, percenti, densityi]
except IndexError:
print('ComplexStrainEnergyArray', dt, eid, energyr, energyi, percenti, densityi)
raise
self.ielement += 1
self.itotal += 1
def get_stats(self, short=False):
if not self.is_built:
return [
'<%s>\n' % self.__class__.__name__,
' ntimes: %i\n' % self.ntimes,
' ntotal: %i\n' % self.ntotal,
]
nelements = self.nelements
ntimes = self.ntimes
#ntotal = self.ntotal
msg = []
if self.nonlinear_factor not in (None, np.nan): # transient
msg.append(' type=%s ntimes=%i nelements=%i\n'
% (self.__class__.__name__, ntimes, nelements))
ntimes_word = 'ntimes'
else:
msg.append(' type=%s nelements=%i\n'
% (self.__class__.__name__, nelements))
ntimes_word = '1'
headers = self.get_headers()
n = len(headers)
msg.append(' element: [%s, nelements]; eid=100000000 -> total\n' % (ntimes_word))
msg.append(' data: [%s, nelements, %i] where %i=[%s]\n' % (ntimes_word, n, n, str(', '.join(headers))))
msg.append(' data.shape = %s\n' % str(self.data.shape).replace('L', ''))
#msg.append(' element type: %s\n' % self.element_type)
#msg.append(' element name: %s\n ' % self.element_name)
msg += self.get_data_code()
return msg
def write_f06(self, f06_file, header=None, page_stamp='PAGE %s', page_num=1, is_mag_phase=False, is_sort1=True):
if header is None:
header = []
msg_temp = (
' E L E M E N T S T R A I N E N E R G I E S ( A V E R A G E ) \n'
' \n'
' ELEMENT-TYPE = %-5s * TOTAL ENERGY OF ALL ELEMENTS IN PROBLEM = %s\n'
' SUBCASE 1 * TOTAL ENERGY OF ALL ELEMENTS IN SET -1 = %s\n'
'0\n'
' ELEMENT-ID STRAIN-ENERGY (MAG/PHASE) PERCENT OF TOTAL STRAIN-ENERGY-DENSITY\n'
#' 5 2.027844E-10 / 0.0 1.2581 2.027844E-09'
)
ntimes = self.data.shape[0]
#etype = self.element_data_type
for itime in range(ntimes):
dt = self._times[itime] # TODO: rename this...
header = _eigenvalue_header(self, header, itime, ntimes, dt)
total_energy = 0.
total_set_energy = 0.
eids = self.element[itime, :]
# energyr, energyi, percent, density
energyr = self.data[itime, :, 0]
energyi = self.data[itime, :, 1]
percent = self.data[itime, :, 2]
density = self.data[itime, :, 3]
#total_energy = self.data[:, :, 0].sum()
#total_set_energy = energy.sum()
#total_set_energy = energy[itotal]
#total_percent = percent.sum()
msg_temp2 = [msg_temp % (self.element_name, total_energy, total_set_energy)]
f06_file.write(''.join(header + msg_temp2))
fmt1 = ' ' * 23 + '%10i %-13s / %-13s %7.4f %s\n'
fmt2 = '\n TYPE = %-8s SUBTOTAL %13s %.4f\n'
for (eid, energyri, energyii, percenti, densityi) in zip(eids, energyr, energyi, percent, density):
senergyr = write_float_13e(energyri)
senergyi = write_float_13e(energyii)
sdensityi = write_float_13e(densityi)
# ELEMENT-ID STRAIN-ENERGY PERCENT OF TOTAL STRAIN-ENERGY-DENSITY
# 1 -8.307121E-12 0.0052 -2.886861E-12
#if eid == 100000000:
#f06_file.write(fmt2 % (self.element_name, senergyi, percenti))
#break
f06_file.write(fmt1 % (
eid, senergyr, senergyi, percenti, sdensityi))
f06_file.write(page_stamp % page_num)
page_num += 1
#break
return page_num - 1
| 41.614679
| 139
| 0.489859
| 3,472
| 31,752
| 4.341878
| 0.09879
| 0.039403
| 0.01592
| 0.010614
| 0.817446
| 0.793035
| 0.770879
| 0.757148
| 0.75257
| 0.740232
| 0
| 0.042254
| 0.39925
| 31,752
| 762
| 140
| 41.669291
| 0.748047
| 0.247386
| 0
| 0.655629
| 0
| 0.006623
| 0.111902
| 0.007582
| 0
| 0
| 0
| 0.002625
| 0.050773
| 1
| 0.059603
| false
| 0.002208
| 0.013245
| 0.013245
| 0.119205
| 0.02649
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
08fe48afff591605c88865d0e75ca61c751a3c01
| 22,892
|
py
|
Python
|
eventstore/test_forms.py
|
praekeltfoundation/ndoh-hub
|
91d834ff8fe43b930a73d8debdaa0e6af78c5efc
|
[
"BSD-3-Clause"
] | null | null | null |
eventstore/test_forms.py
|
praekeltfoundation/ndoh-hub
|
91d834ff8fe43b930a73d8debdaa0e6af78c5efc
|
[
"BSD-3-Clause"
] | 126
|
2016-07-12T19:39:44.000Z
|
2022-03-24T13:39:38.000Z
|
eventstore/test_forms.py
|
praekeltfoundation/ndoh-hub
|
91d834ff8fe43b930a73d8debdaa0e6af78c5efc
|
[
"BSD-3-Clause"
] | 3
|
2016-09-28T13:16:11.000Z
|
2020-11-07T15:32:37.000Z
|
from unittest import mock
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import TestCase
from eventstore.forms import MomConnectImportForm
from eventstore.models import ImportRow, MomConnectImport
from registrations.models import ClinicCode
class MomConnectImportFormTests(TestCase):
def setUp(self):
ClinicCode.objects.create(value="123456")
patcher = mock.patch("eventstore.models.is_valid_edd_date")
self.is_valid_edd_date = patcher.start()
self.is_valid_edd_date.return_value = True
patcher = mock.patch("eventstore.forms.validate_momconnect_import")
self.validate_momconnect_import = patcher.start()
def tearDown(self):
self.is_valid_edd_date.stop()
self.validate_momconnect_import.stop()
def test_missing_columns(self):
"""
Should mark the import as error, and write an error for the missing columns
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,messaging consent,edd year,edd month,baby dob year,"
b"baby dob month,baby dob day\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
self.assertTrue(form.is_valid())
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error, "Fields edd_day facility_code id_type not found in header"
)
def test_invalid_file_type(self):
"""
If we cannot decode the file, should mark import as error and write an error
"""
file = SimpleUploadedFile("test.csv", b"\xe8")
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
self.assertTrue(form.is_valid())
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(error.error, "File is not a CSV")
def test_valid_rows(self):
"""
Should save the rows
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,edd year,"
b"edd month,edd day,baby dob year, baby dob month, baby dob day,language\n"
b"+27820001001,123456,said,9001010001088,true,2021,12,1,,,,afr\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
self.assertTrue(form.is_valid())
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.VALIDATING)
self.assertEqual(instance.errors.count(), 0)
[row] = instance.rows.all()
self.assertEqual(row.row_number, 2)
self.assertEqual(row.msisdn, "+27820001001")
self.assertEqual(row.facility_code, "123456")
self.assertEqual(row.id_type, ImportRow.IDType.SAID)
self.assertEqual(row.id_number, "9001010001088")
self.assertEqual(row.messaging_consent, True)
self.assertEqual(row.research_consent, False)
self.assertEqual(row.edd_year, 2021)
self.assertEqual(row.edd_month, 12)
self.assertEqual(row.edd_day, 1)
self.assertEqual(row.language, ImportRow.Language.AFR)
self.validate_momconnect_import.delay.assert_called_once_with(instance.id)
def test_empty_language(self):
"""
Should save the rows
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,edd year,"
b"edd month,edd day,baby dob year,baby dob month,baby dob day,language\n"
b"+27820001001,123456,said,9001010001088,true,2021,12,1,,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
self.assertTrue(form.is_valid())
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.VALIDATING)
self.assertEqual(instance.errors.count(), 0)
[row] = instance.rows.all()
self.assertEqual(row.language, ImportRow.Language.ENG)
self.validate_momconnect_import.delay.assert_called_once_with(instance.id)
def test_invalid_msisdn(self):
"""
Should mark import as error, and write an error row
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,edd year,"
b"edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+1234,123456,said,9001010001088,1,2021,12,1,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
self.assertTrue(form.is_valid())
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error, "Field msisdn failed validation: Not a possible phone number"
)
self.assertEqual(error.row_number, 2)
def test_invalid_messaging_consent(self):
"""
messaging_consent should be present and be True
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,edd year,"
b"edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,,2021,12,1,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Field messaging_consent failed validation: This field is required.",
)
self.assertEqual(error.row_number, 2)
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,edd year,"
b"edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,no,2021,12,1,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error, "Field messaging_consent failed validation: False is not true"
)
self.assertEqual(error.row_number, 2)
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,edd year,"
b"edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,foo,2021,12,1,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Field messaging_consent failed validation: 'foo' value must be either "
"True or False.",
)
self.assertEqual(error.row_number, 2)
def test_invalid_research_consent(self):
"""
research_consent should have a valid value
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"research_consent,edd year,edd month,edd day,baby dob year,"
b"baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,true,foo,2021,12,1,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Field research_consent failed validation: 'foo' value must be either "
"True or False.",
)
self.assertEqual(error.row_number, 2)
def test_research_consent_default(self):
"""
research_consent should default to False
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"research_consent,edd year,edd month,edd day,baby dob year,"
b"baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,true,,2021,12,1,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.VALIDATING)
self.assertEqual(instance.errors.count(), 0)
[row] = instance.rows.all()
self.assertFalse(row.research_consent)
def test_invalid_previous_optout(self):
"""
previous_optout should have a valid value
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"previous_optout,edd year,edd month,edd day,baby dob year,"
b"baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,true,foo,2021,12,1,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Field previous_optout failed validation: 'foo' value must be either "
"True or False.",
)
self.assertEqual(error.row_number, 2)
def test_previous_optout_default(self):
"""
previous_optout should default to True
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"previous_optout,edd year,edd month,edd day,baby dob year,"
b"baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,true,,2021,12,1,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.VALIDATING)
self.assertEqual(instance.errors.count(), 0)
[row] = instance.rows.all()
self.assertTrue(row.previous_optout)
def test_facility_code_invalid(self):
"""
facility_code must be in the database
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"edd year,edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,,said,9001010001088,true,2021,12,1,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Field facility_code failed validation: This field is required.",
)
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"edd year,edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,abc123,said,9001010001088,true,2021,12,1,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error, "Field facility_code failed validation: Invalid Facility Code"
)
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"edd year,edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,1234567,said,9001010001088,true,2021,12,1,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Field facility_code failed validation: Ensure this value has at most 6 "
"characters (it has 7).",
)
def test_invalid_edd(self):
"""
edd fields should form a valid date, that is between now and 9 months from now
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"edd year,edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,true,2021,2,29,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Failed validation: Invalid EDD date, day is out of range for month",
)
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"edd year,edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,true,2021,Feb,20,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error, "Field edd_month failed validation: Enter a whole number."
)
self.is_valid_edd_date.return_value = False
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"edd year,edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,true,2121,2,4,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error, "Failed validation: EDD must be between now and 9 months"
)
def test_invalid_baby_dob(self):
"""
baby dob fields should form a valid date
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"edd year,edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,true,,,,2021,2,29\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Failed validation: Invalid Baby DOB date, day is out of range for month",
)
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"edd year,edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,true,,,,2021,Feb,20\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error, "Field baby_dob_month failed validation: Enter a whole number."
)
def test_valid_baby_dob_or_edd(self):
"""
baby dob or edd should be added
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,"
b"edd year,edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001088,true,,,,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error, "Failed validation: EDD or Baby DOB fields must be populated"
)
def test_idtype_said(self):
"""
id_number is required for sa_id
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,messaging consent,edd year,edd month,"
b"edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,said,true,2021,2,3,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error, "Failed validation: ID number required for SA ID ID type"
)
def test_invalid_id_number(self):
"""
id number must be valid
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,id number,messaging consent,edd year,"
b"edd month,edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,said,9001010001089,true,2021,2,3,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Field id_number failed validation: Invalid ID number: "
"Failed Luhn checksum",
)
def test_idtype_passport(self):
"""
passport country and passport number are required for passport
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,passport number,passport country,"
b"messaging consent,edd year,edd month,edd day,baby dob year,"
b"baby dob month,baby dob day\n"
b"+27820001001,123456,passport,A1234,,true,2021,2,3,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Failed validation: Passport country required for passport ID type",
)
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,passport number,passport country,"
b"messaging consent,edd year,edd month,edd day,baby dob year,"
b"baby dob month,baby dob day\n"
b"+27820001001,123456,passport,,zimbabwe,true,2021,2,3,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Failed validation: Passport number required for passport ID type",
)
def test_idtype_dob(self):
"""
dob is required for none id type
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,messaging consent,edd year,edd month,"
b"edd day,baby dob year,baby dob month,baby dob day\n"
b"+27820001001,123456,none,true,2021,2,3,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error, "Failed validation: Date of birth required for none ID type"
)
def test_invalid_dob(self):
"""
dob should be a valid date
"""
file = SimpleUploadedFile(
"test.csv",
b"msisdn,facility code,id type,messaging consent,edd year,edd month,"
b"edd day,dob year,dob month,dob day,baby dob year,baby dob month,"
b"baby dob day\n"
b"+27820001001,123456,none,true,2021,2,3,1990,2,29,,,\n",
)
form = MomConnectImportForm(
data={"source": "MomConnect Import"}, files={"file": file}
)
instance = form.save()
self.assertEqual(instance.status, MomConnectImport.Status.ERROR)
[error] = instance.errors.all()
self.assertEqual(
error.error,
"Failed validation: Invalid date of birth date, day is out of range for "
"month",
)
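# A hypothetical invocation sketch for this suite (the dotted test label is an
# assumption about the project layout):
#   ./manage.py test eventstore.test_forms.MomConnectImportFormTests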
| 40.020979
| 88
| 0.601127
| 2,583
| 22,892
| 5.276423
| 0.068525
| 0.043657
| 0.052315
| 0.057451
| 0.840487
| 0.824345
| 0.80857
| 0.794776
| 0.792795
| 0.783476
| 0
| 0.054989
| 0.285034
| 22,892
| 571
| 89
| 40.091068
| 0.777723
| 0.03617
| 0
| 0.627615
| 0
| 0.025105
| 0.339175
| 0.068251
| 0
| 0
| 0
| 0
| 0.169456
| 1
| 0.043933
| false
| 0.014644
| 0.200837
| 0
| 0.246862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1c2999438d249cae19d9039c2c4d679652a5ede4
| 175
|
py
|
Python
|
Classes/Logic/LogicStringUtil.py
|
AkulaBs/BSDS-Server-V42
|
2cf195f87838d8ad96b1852b367d39fd7e06b276
|
[
"Apache-2.0"
] | 19
|
2021-12-23T19:15:09.000Z
|
2022-03-03T12:40:33.000Z
|
Classes/Logic/LogicStringUtil.py
|
KulerDev/BSDS-V42
|
80d78c9a6e7ac57121fca6a3a404e630f2792603
|
[
"Apache-2.0"
] | 12
|
2021-12-23T19:16:31.000Z
|
2022-03-04T08:58:18.000Z
|
Classes/Logic/LogicStringUtil.py
|
KulerDev/BSDS-V42
|
80d78c9a6e7ac57121fca6a3a404e630f2792603
|
[
"Apache-2.0"
] | 13
|
2021-12-24T10:00:11.000Z
|
2022-03-14T02:03:54.000Z
|
class LogicStringUtil:
@staticmethod
def getBytes(string):
return string.encode()
@staticmethod
def getByteLength(string):
return len(string)
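# Usage sketch: LogicStringUtil.getBytes("abc") returns b"abc" (UTF-8 by default).
# Note that getByteLength returns len(string), i.e. the character count of the str,
# which only equals the encoded byte length for pure-ASCII input.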
| 19.444444
| 30
| 0.662857
| 16
| 175
| 7.25
| 0.625
| 0.258621
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.257143
| 175
| 8
| 31
| 21.875
| 0.892308
| 0
| 0
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0
| 0.285714
| 0.714286
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
98dc0b3993f178cb455eeb32022a8e882921b882
| 40
|
py
|
Python
|
python/GMatElastoPlasticQPot3d/__init__.py
|
tdegeus/ElastoPlasticQPot3d
|
c61987f0dd001d218e067231e1a71b775815a849
|
[
"MIT"
] | null | null | null |
python/GMatElastoPlasticQPot3d/__init__.py
|
tdegeus/ElastoPlasticQPot3d
|
c61987f0dd001d218e067231e1a71b775815a849
|
[
"MIT"
] | 15
|
2018-11-13T08:44:45.000Z
|
2021-08-30T07:09:55.000Z
|
python/GMatElastoPlasticQPot3d/__init__.py
|
tdegeus/ElastoPlasticQPot3d
|
c61987f0dd001d218e067231e1a71b775815a849
|
[
"MIT"
] | null | null | null |
from ._GMatElastoPlasticQPot3d import *
| 20
| 39
| 0.85
| 3
| 40
| 11
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027778
| 0.1
| 40
| 1
| 40
| 40
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
98dd012e5899e747046c78cc38bc70ecc3397f07
| 340
|
py
|
Python
|
modules/extensions/regexes.py
|
BatedUrGonnaDie/salty_bot
|
f8ad53fdd865762225de49164400db33fae8ba85
|
[
"MIT"
] | 12
|
2015-01-16T16:48:30.000Z
|
2020-08-11T20:11:51.000Z
|
modules/extensions/regexes.py
|
BatedUrGonnaDie/salty_bot
|
f8ad53fdd865762225de49164400db33fae8ba85
|
[
"MIT"
] | 28
|
2015-01-28T10:54:51.000Z
|
2018-04-10T19:06:34.000Z
|
modules/extensions/regexes.py
|
BatedUrGonnaDie/salty_bot
|
f8ad53fdd865762225de49164400db33fae8ba85
|
[
"MIT"
] | 4
|
2015-07-13T08:41:32.000Z
|
2019-01-12T16:19:01.000Z
|
#! /usr/bin/env python3.7
import re
OSU_URL = re.compile("(?:http[s]{0,1}://)?osu.ppy.sh/beatmapsets/(\d+)(?:#.+?)/(\d+)")
YOUTUBE_URL = re.compile("(?:youtube(?:-nocookie)?\.com\/(?:[^\/\n\s]+\/\S+\/|(?:v|e(?:mbed)?)\/|\S*?[?&]v=)|youtu\.be\/)([a-zA-Z0-9_-]{11})")
POLL_NAME = re.compile('"(.+)"')
POLL_OPTIONS = re.compile("\((.+?)\)")
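# A minimal usage sketch (the URL below is illustrative only):
#   m = OSU_URL.search("https://osu.ppy.sh/beatmapsets/123#osu/456")
#   if m:
#       beatmapset_id, beatmap_id = m.groups()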
| 37.777778
| 142
| 0.508824
| 51
| 340
| 3.294118
| 0.686275
| 0.214286
| 0.142857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024922
| 0.055882
| 340
| 8
| 143
| 42.5
| 0.498442
| 0.070588
| 0
| 0
| 0
| 0.4
| 0.606349
| 0.55873
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c7888e08c7b01f52fb35b2daa7614a07c117b682
| 14,650
|
py
|
Python
|
controller/api/tests/test_build.py
|
yun-an/deis
|
de27c11475bb7ca24816f288aa115699a1c37e26
|
[
"Apache-2.0"
] | 3,375
|
2015-01-01T04:03:45.000Z
|
2022-02-08T14:53:45.000Z
|
controller/api/tests/test_build.py
|
yun-an/deis
|
de27c11475bb7ca24816f288aa115699a1c37e26
|
[
"Apache-2.0"
] | 2,422
|
2015-01-01T02:40:01.000Z
|
2021-11-30T07:50:32.000Z
|
controller/api/tests/test_build.py
|
yun-an/deis
|
de27c11475bb7ca24816f288aa115699a1c37e26
|
[
"Apache-2.0"
] | 688
|
2015-01-01T00:36:48.000Z
|
2022-01-22T00:32:07.000Z
|
"""
Unit tests for the Deis api app.
Run the tests with "./manage.py test api"
"""
from __future__ import unicode_literals
import json
from django.contrib.auth.models import User
from django.test import TransactionTestCase
import mock
from rest_framework.authtoken.models import Token
from api.models import Build
from . import mock_status_ok
@mock.patch('api.models.publish_release', lambda *args: None)
class BuildTest(TransactionTestCase):
"""Tests build notification from build system"""
fixtures = ['tests.json']
def setUp(self):
self.user = User.objects.get(username='autotest')
self.token = Token.objects.get(user=self.user).key
@mock.patch('requests.post', mock_status_ok)
def test_build(self):
"""
Test that a null build is created and that users can post new builds
"""
url = '/v1/apps'
response = self.client.post(url, HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
app_id = response.data['id']
# check to see that no initial build was created
url = "/v1/apps/{app_id}/builds".format(**locals())
response = self.client.get(url,
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data['count'], 0)
# post a new build
body = {'image': 'autotest/example'}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
build_id = response.data['uuid']
build1 = response.data
self.assertEqual(response.data['image'], body['image'])
# read the build
url = "/v1/apps/{app_id}/builds/{build_id}".format(**locals())
response = self.client.get(url,
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 200)
build2 = response.data
self.assertEqual(build1, build2)
# post a new build
url = "/v1/apps/{app_id}/builds".format(**locals())
body = {'image': 'autotest/example'}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
self.assertIn('x-deis-release', response._headers)
build3 = response.data
self.assertEqual(response.data['image'], body['image'])
self.assertNotEqual(build2['uuid'], build3['uuid'])
# disallow put/patch/delete
response = self.client.put(url, HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 405)
response = self.client.patch(url, HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 405)
response = self.client.delete(url, HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 405)
@mock.patch('requests.post', mock_status_ok)
def test_response_data(self):
"""Test that the serialized response contains only relevant data."""
body = {'id': 'test'}
url = '/v1/apps'
response = self.client.post(url, json.dumps(body),
content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
# post an image as a build
url = "/v1/apps/test/builds".format(**locals())
body = {'image': 'autotest/example'}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
for key in response.data:
self.assertIn(key, ['uuid', 'owner', 'created', 'updated', 'app', 'dockerfile',
'image', 'procfile', 'sha'])
expected = {
'owner': self.user.username,
'app': 'test',
'dockerfile': '',
'image': 'autotest/example',
'procfile': {},
'sha': ''
}
self.assertDictContainsSubset(expected, response.data)
@mock.patch('requests.post', mock_status_ok)
def test_build_default_containers(self):
url = '/v1/apps'
response = self.client.post(url, HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
app_id = response.data['id']
# post an image as a build
url = "/v1/apps/{app_id}/builds".format(**locals())
body = {'image': 'autotest/example'}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
url = "/v1/apps/{app_id}/containers/cmd".format(**locals())
response = self.client.get(url,
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['results']), 1)
container = response.data['results'][0]
self.assertEqual(container['type'], 'cmd')
self.assertEqual(container['num'], 1)
# start with a new app
url = '/v1/apps'
response = self.client.post(url, HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
app_id = response.data['id']
# post a new build with procfile
url = "/v1/apps/{app_id}/builds".format(**locals())
body = {'image': 'autotest/example',
'sha': 'a'*40,
'dockerfile': "FROM scratch"}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
url = "/v1/apps/{app_id}/containers/cmd".format(**locals())
response = self.client.get(url,
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['results']), 1)
container = response.data['results'][0]
self.assertEqual(container['type'], 'cmd')
self.assertEqual(container['num'], 1)
# start with a new app
url = '/v1/apps'
response = self.client.post(url, HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
app_id = response.data['id']
# post a new build with procfile
url = "/v1/apps/{app_id}/builds".format(**locals())
body = {'image': 'autotest/example',
'sha': 'a'*40,
'dockerfile': "FROM scratch",
'procfile': {'worker': 'node worker.js'}}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
url = "/v1/apps/{app_id}/containers/cmd".format(**locals())
response = self.client.get(url,
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['results']), 1)
container = response.data['results'][0]
self.assertEqual(container['type'], 'cmd')
self.assertEqual(container['num'], 1)
# start with a new app
url = '/v1/apps'
response = self.client.post(url, HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
app_id = response.data['id']
# post a new build with procfile
url = "/v1/apps/{app_id}/builds".format(**locals())
body = {'image': 'autotest/example',
'sha': 'a'*40,
'procfile': json.dumps({'web': 'node server.js',
'worker': 'node worker.js'})}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
url = "/v1/apps/{app_id}/containers/web".format(**locals())
response = self.client.get(url,
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['results']), 1)
container = response.data['results'][0]
self.assertEqual(container['type'], 'web')
self.assertEqual(container['num'], 1)
@mock.patch('requests.post', mock_status_ok)
def test_build_str(self):
"""Test the text representation of a build."""
url = '/v1/apps'
response = self.client.post(url, HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
app_id = response.data['id']
# post a new build
url = "/v1/apps/{app_id}/builds".format(**locals())
body = {'image': 'autotest/example'}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
build = Build.objects.get(uuid=response.data['uuid'])
self.assertEqual(str(build), "{}-{}".format(
response.data['app'], response.data['uuid'][:7]))
@mock.patch('requests.post', mock_status_ok)
def test_admin_can_create_builds_on_other_apps(self):
"""If a user creates an application, an administrator should be able
to push builds.
"""
# create app as non-admin
user = User.objects.get(username='autotest2')
token = Token.objects.get(user=user).key
url = '/v1/apps'
response = self.client.post(url, HTTP_AUTHORIZATION='token {}'.format(token))
self.assertEqual(response.status_code, 201)
app_id = response.data['id']
# post a new build as admin
url = "/v1/apps/{app_id}/builds".format(**locals())
body = {'image': 'autotest/example'}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
build = Build.objects.get(uuid=response.data['uuid'])
self.assertEqual(str(build), "{}-{}".format(
response.data['app'], response.data['uuid'][:7]))
@mock.patch('requests.post', mock_status_ok)
def test_unauthorized_user_cannot_modify_build(self):
"""
An unauthorized user should not be able to modify other builds.
Since an unauthorized user can't access the application, these
requests should return a 403.
"""
app_id = 'autotest'
url = '/v1/apps'
body = {'id': app_id}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
unauthorized_user = User.objects.get(username='autotest2')
unauthorized_token = Token.objects.get(user=unauthorized_user).key
url = '{}/{}/builds'.format(url, app_id)
body = {'image': 'foo'}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(unauthorized_token))
self.assertEqual(response.status_code, 403)
@mock.patch('requests.post', mock_status_ok)
def test_new_build_does_not_scale_up_automatically(self):
"""
After the first initial deploy, if the containers are scaled down to zero,
they should stay that way on a new release.
"""
url = '/v1/apps'
response = self.client.post(url, HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
app_id = response.data['id']
# post a new build
url = "/v1/apps/{app_id}/builds".format(**locals())
body = {'image': 'autotest/example',
'sha': 'a'*40,
'procfile': json.dumps({'web': 'node server.js',
'worker': 'node worker.js'})}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
url = "/v1/apps/{app_id}/containers/web".format(**locals())
response = self.client.get(url,
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['results']), 1)
# scale to zero
url = "/v1/apps/{app_id}/scale".format(**locals())
body = {'web': 0}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 204)
# post another build
url = "/v1/apps/{app_id}/builds".format(**locals())
body = {'image': 'autotest/example',
'sha': 'a'*40,
'procfile': json.dumps({'web': 'node server.js',
'worker': 'node worker.js'})}
response = self.client.post(url, json.dumps(body), content_type='application/json',
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 201)
url = "/v1/apps/{app_id}/containers/web".format(**locals())
response = self.client.get(url,
HTTP_AUTHORIZATION='token {}'.format(self.token))
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['results']), 0)
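# The flow exercised above, sketched as hypothetical HTTP calls (host and token
# are illustrative):
#   POST /v1/apps                          -> 201, returns {'id': ...}
#   POST /v1/apps/<app_id>/builds          -> 201, creates a build and a release
#   GET  /v1/apps/<app_id>/containers/web  -> 200, lists scheduled containers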
| 49.661017
| 93
| 0.590034
| 1,651
| 14,650
| 5.13507
| 0.109631
| 0.090234
| 0.072187
| 0.112291
| 0.792286
| 0.775065
| 0.762326
| 0.759377
| 0.755957
| 0.729535
| 0
| 0.01505
| 0.265256
| 14,650
| 294
| 94
| 49.829932
| 0.772575
| 0.074812
| 0
| 0.710638
| 0
| 0
| 0.154277
| 0.038513
| 0
| 0
| 0
| 0
| 0.234043
| 1
| 0.034043
| false
| 0
| 0.034043
| 0
| 0.076596
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
c7c423dcd660acbea04b34e366433c5faba93db0
| 75
|
py
|
Python
|
malaya_speech/train/model/tacotron2_nvidia/__init__.py
|
ishine/malaya-speech
|
fd34afc7107af1656dff4b3201fa51dda54fde18
|
[
"MIT"
] | 111
|
2020-08-31T04:58:54.000Z
|
2022-03-29T15:44:18.000Z
|
malaya_speech/train/model/tacotron2_nvidia/__init__.py
|
ishine/malaya-speech
|
fd34afc7107af1656dff4b3201fa51dda54fde18
|
[
"MIT"
] | 14
|
2020-12-16T07:27:22.000Z
|
2022-03-15T17:39:01.000Z
|
malaya_speech/train/model/tacotron2_nvidia/__init__.py
|
ishine/malaya-speech
|
fd34afc7107af1656dff4b3201fa51dda54fde18
|
[
"MIT"
] | 29
|
2021-02-09T08:57:15.000Z
|
2022-03-12T14:09:19.000Z
|
from .model import Model
from ..tacotron2 import generate_guided_attention
| 25
| 49
| 0.853333
| 10
| 75
| 6.2
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014925
| 0.106667
| 75
| 2
| 50
| 37.5
| 0.910448
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
40458cbb6dc4c78db2ef9e56546de2106fbbb007
| 7,525
|
py
|
Python
|
tfx/tools/cli/commands/pipeline.py
|
romeokienzler/tfx
|
6449173532bc35b78dbfb93aa89a688a7278ef59
|
[
"Apache-2.0"
] | null | null | null |
tfx/tools/cli/commands/pipeline.py
|
romeokienzler/tfx
|
6449173532bc35b78dbfb93aa89a688a7278ef59
|
[
"Apache-2.0"
] | null | null | null |
tfx/tools/cli/commands/pipeline.py
|
romeokienzler/tfx
|
6449173532bc35b78dbfb93aa89a688a7278ef59
|
[
"Apache-2.0"
] | 1
|
2020-06-05T08:31:32.000Z
|
2020-06-05T08:31:32.000Z
|
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Commands for pipeline group."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import click
from typing import Text
from tfx.tools.cli import labels
from tfx.tools.cli.cli_context import Context
from tfx.tools.cli.cli_context import pass_context
from tfx.tools.cli.handler import handler_factory
@click.group('pipeline')
def pipeline_group() -> None:
pass
# TODO(b/132286477): Add support for requirements file.
@pipeline_group.command('create', help='Create a pipeline')
@pass_context
@click.option(
'--engine', default='auto', type=str, help='Orchestrator for pipelines')
@click.option(
'--pipeline_path', required=True, type=str, help='Path to Python DSL.')
@click.option(
'--package_path',
type=str,
help='Path to the pipeline output workflow file.')
@click.option(
'--endpoint',
default=None,
type=str,
help='Endpoint of the KFP API service to connect.')
@click.option(
'--iap_client_id',
default=None,
type=str,
help='Client ID for IAP protected endpoint.')
@click.option(
'-n',
'--namespace',
default='kubeflow',
type=str,
help='Kubernetes namespace to connect to the KFP API.')
def create_pipeline(ctx: Context, engine: Text, pipeline_path: Text,
package_path: Text, endpoint: Text, iap_client_id: Text,
namespace: Text) -> None:
"""Command definition to create a pipeline."""
click.echo('Creating pipeline')
ctx.flags_dict[labels.ENGINE_FLAG] = engine
ctx.flags_dict[labels.PIPELINE_DSL_PATH] = pipeline_path
ctx.flags_dict[labels.PIPELINE_PACKAGE_PATH] = package_path
ctx.flags_dict[labels.ENDPOINT] = endpoint
ctx.flags_dict[labels.IAP_CLIENT_ID] = iap_client_id
ctx.flags_dict[labels.NAMESPACE] = namespace
handler_factory.create_handler(ctx.flags_dict).create_pipeline()
@pipeline_group.command('update', help='Update an existing pipeline.')
@pass_context
@click.option(
'--engine', default='auto', type=str, help='Orchestrator for pipelines')
@click.option(
'--pipeline_path', required=True, type=str, help='Path to Python DSL file')
@click.option(
'--package_path',
type=str,
help='Path to the output workflow tar.gz file.')
@click.option(
'--endpoint',
default='',
type=str,
help='Endpoint of the KFP API service to connect.')
@click.option(
'--iap_client_id',
default='',
type=str,
help='Client ID for IAP protected endpoint.')
@click.option(
'-n',
'--namespace',
default='kubeflow',
type=str,
help='Kubernetes namespace to connect to the KFP API.')
def update_pipeline(ctx: Context, engine: Text, pipeline_path: Text,
package_path: Text, endpoint: Text, iap_client_id: Text,
namespace: Text) -> None:
"""Command definition to update a pipeline."""
click.echo('Updating pipeline')
ctx.flags_dict[labels.ENGINE_FLAG] = engine
ctx.flags_dict[labels.PIPELINE_DSL_PATH] = pipeline_path
ctx.flags_dict[labels.PIPELINE_PACKAGE_PATH] = package_path
ctx.flags_dict[labels.ENDPOINT] = endpoint
ctx.flags_dict[labels.IAP_CLIENT_ID] = iap_client_id
ctx.flags_dict[labels.NAMESPACE] = namespace
handler_factory.create_handler(ctx.flags_dict).update_pipeline()
@pipeline_group.command('delete', help='Delete a pipeline')
@pass_context
@click.option(
'--engine', default='auto', type=str, help='Orchestrator for pipelines')
@click.option(
'--pipeline_name', required=True, type=str, help='Name of the pipeline')
@click.option(
'--endpoint',
default='',
type=str,
help='Endpoint of the KFP API service to connect.')
@click.option(
'--iap_client_id',
default='',
type=str,
help='Client ID for IAP protected endpoint.')
@click.option(
'-n',
'--namespace',
default='kubeflow',
type=str,
help='Kubernetes namespace to connect to the KFP API.')
def delete_pipeline(ctx: Context, engine: Text, pipeline_name: Text,
endpoint: Text, iap_client_id: Text,
namespace: Text) -> None:
"""Command definition to delete a pipeline."""
click.echo('Deleting pipeline')
ctx.flags_dict[labels.ENGINE_FLAG] = engine
ctx.flags_dict[labels.PIPELINE_NAME] = pipeline_name
ctx.flags_dict[labels.ENDPOINT] = endpoint
ctx.flags_dict[labels.IAP_CLIENT_ID] = iap_client_id
ctx.flags_dict[labels.NAMESPACE] = namespace
handler_factory.create_handler(ctx.flags_dict).delete_pipeline()
@pipeline_group.command('list', help='List all the pipelines')
@pass_context
@click.option(
'--engine', default='auto', type=str, help='orchestrator for pipelines')
@click.option(
'--endpoint',
default='',
type=str,
help='Endpoint of the KFP API service to connect.')
@click.option(
'--iap_client_id',
default='',
type=str,
help='Client ID for IAP protected endpoint.')
@click.option(
'-n',
'--namespace',
default='kubeflow',
type=str,
help='Kubernetes namespace to connect to the KFP API.')
def list_pipelines(ctx: Context, engine: Text, endpoint: Text,
iap_client_id: Text, namespace: Text) -> None:
"""Command definition to list pipelines."""
click.echo('Listing all pipelines')
ctx.flags_dict[labels.ENGINE_FLAG] = engine
ctx.flags_dict[labels.ENDPOINT] = endpoint
ctx.flags_dict[labels.IAP_CLIENT_ID] = iap_client_id
ctx.flags_dict[labels.NAMESPACE] = namespace
handler_factory.create_handler(ctx.flags_dict).list_pipelines()
@pipeline_group.command('compile', help='Compile a pipeline')
@pass_context
@click.option(
'--engine', default='auto', type=str, help='Orchestrator for pipelines')
@click.option(
'--pipeline_path', required=True, type=str, help='Path to Python DSL.')
@click.option(
'--package_path', type=str, help='Path to the output workflow tar.gz file.')
@click.option(
'--endpoint',
default='',
type=str,
help='Endpoint of the KFP API service to connect.')
@click.option(
'--iap_client_id',
default='',
type=str,
help='Client ID for IAP protected endpoint.')
@click.option(
'-n',
'--namespace',
default='kubeflow',
type=str,
help='Kubernetes namespace to connect to the KFP API.')
def compile_pipeline(ctx: Context, engine: Text, pipeline_path: Text,
package_path: Text, endpoint: Text, iap_client_id: Text,
namespace: Text) -> None:
"""Command definition to create a pipeline."""
click.echo('Compiling pipeline')
ctx.flags_dict[labels.ENGINE_FLAG] = engine
ctx.flags_dict[labels.PIPELINE_DSL_PATH] = pipeline_path
ctx.flags_dict[labels.PIPELINE_PACKAGE_PATH] = package_path
ctx.flags_dict[labels.ENDPOINT] = endpoint
ctx.flags_dict[labels.IAP_CLIENT_ID] = iap_client_id
ctx.flags_dict[labels.NAMESPACE] = namespace
handler_factory.create_handler(ctx.flags_dict).compile_pipeline()
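# A hypothetical CLI invocation sketch for the commands above (paths and the
# endpoint are illustrative, not verified defaults):
#   tfx pipeline create --engine=kubeflow --pipeline_path=my_pipeline.py \
#       --package_path=my_pipeline.tar.gz --endpoint=<kfp-endpoint>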
| 34.677419
| 80
| 0.706047
| 1,008
| 7,525
| 5.112103
| 0.143849
| 0.04968
| 0.07452
| 0.094314
| 0.757035
| 0.739569
| 0.732583
| 0.720551
| 0.720551
| 0.720551
| 0
| 0.002715
| 0.167973
| 7,525
| 216
| 81
| 34.837963
| 0.820316
| 0.113621
| 0
| 0.773481
| 0
| 0
| 0.239813
| 0
| 0
| 0
| 0
| 0.00463
| 0
| 1
| 0.033149
| false
| 0.038674
| 0.049724
| 0
| 0.082873
| 0.005525
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
409c669213b3e763fa02dd8102741ac5297f8fde
| 27
|
py
|
Python
|
evoke/Permit/__init__.py
|
howiemac/evoke5
|
430d6dfd719f8c88a4c3de2b735f8736187ff19b
|
[
"BSD-3-Clause"
] | null | null | null |
evoke/Permit/__init__.py
|
howiemac/evoke5
|
430d6dfd719f8c88a4c3de2b735f8736187ff19b
|
[
"BSD-3-Clause"
] | null | null | null |
evoke/Permit/__init__.py
|
howiemac/evoke5
|
430d6dfd719f8c88a4c3de2b735f8736187ff19b
|
[
"BSD-3-Clause"
] | null | null | null |
from .Permit import Permit
| 13.5
| 26
| 0.814815
| 4
| 27
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
40e6b63828e2285e4bcffa07db0ef97aa285748e
| 28
|
py
|
Python
|
notochord/test/classifier/__init__.py
|
jroose/notochord
|
da9a6ff5d0fabbf0694d0bee1b81a240b66fa006
|
[
"MIT"
] | null | null | null |
notochord/test/classifier/__init__.py
|
jroose/notochord
|
da9a6ff5d0fabbf0694d0bee1b81a240b66fa006
|
[
"MIT"
] | null | null | null |
notochord/test/classifier/__init__.py
|
jroose/notochord
|
da9a6ff5d0fabbf0694d0bee1b81a240b66fa006
|
[
"MIT"
] | null | null | null |
from .RandomForest import *
| 14
| 27
| 0.785714
| 3
| 28
| 7.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 28
| 1
| 28
| 28
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
40eb80a3e6df4bd2a7089ef97648a3e1aac1598a
| 2,675
|
py
|
Python
|
test-unit/PythonToJavascript/converters_test/ListSliceConverter_test.py
|
stoogoff/python-to-javascript
|
4349b09b15ada544501e7091c7ff1574487e7598
|
[
"MIT"
] | 1
|
2021-11-19T09:56:41.000Z
|
2021-11-19T09:56:41.000Z
|
test-unit/PythonToJavascript/converters_test/ListSliceConverter_test.py
|
stoogoff/python-to-javascript
|
4349b09b15ada544501e7091c7ff1574487e7598
|
[
"MIT"
] | 2
|
2022-02-25T23:11:27.000Z
|
2022-03-04T10:22:14.000Z
|
test-unit/PythonToJavascript/converters_test/ListSliceConverter_test.py
|
stoogoff/python-to-javascript
|
4349b09b15ada544501e7091c7ff1574487e7598
|
[
"MIT"
] | 4
|
2021-05-06T19:03:19.000Z
|
2022-03-06T13:52:30.000Z
|
from utils import parseSource, nodesToString, nodesToLines, dumpNodes, dumpTree
from converters import ListSliceConverter
def test_ListSliceGather_01():
src = """
alist[ start : finish ]
"""
matches = ListSliceConverter().gather( parseSource( src ) )
match = matches[ 0 ]
assert match.start.toString() == 'start'
assert match.colon.toString() == ':'
assert match.finish.toString() == 'finish'
def test_ListSliceGather_02():
src = """
alist[ : finish ]
"""
matches = ListSliceConverter().gather( parseSource( src ) )
match = matches[ 0 ]
assert "start" not in match
assert match.colon.toString() == ':'
assert match.finish.toString() == 'finish'
def test_ListSliceGather_03():
src = """
alist[ start : ]
"""
matches = ListSliceConverter().gather( parseSource( src ) )
match = matches[ 0 ]
assert match.start.toString() == 'start'
assert match.colon.toString() == ':'
assert "finish" not in match
def test_ListSliceGather_04():
src = """
alist[ : ]
"""
matches = ListSliceConverter().gather( parseSource( src ) )
match = matches[ 0 ]
assert "start" not in match
assert match.colon.toString() == ':'
assert "finish" not in match
def test_ListSliceProcess_01():
src = """
alist[ start : finish ]
"""
nodes = parseSource( src )
cvtr = ListSliceConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """alist.slice( start, finish )"""
def test_ListSliceProcess_02():
src = """
alist[ 1 + 2 + 3 : f( x ) ]
"""
nodes = parseSource( src )
cvtr = ListSliceConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """alist.slice( 1 + 2 + 3, f( x ) )"""
def test_ListSliceProcess_03():
src = """
alist[ : finish ]
"""
nodes = parseSource( src )
cvtr = ListSliceConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """alist.slice( 0, finish )"""
def test_ListSliceProcess_04():
src = """
alist[ start : ]
"""
nodes = parseSource( src )
cvtr = ListSliceConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """alist.slice( start )"""
def test_ListSliceProcess_05():
src = """
alist[ : ]
"""
nodes = parseSource( src )
cvtr = ListSliceConverter()
matches = cvtr.gather( nodes )
cvtr.processAll( matches )
assert nodesToString( nodes ) == """alist.slice()"""
| 28.457447
| 79
| 0.60486
| 262
| 2,675
| 6.10687
| 0.156489
| 0.039375
| 0.071875
| 0.071875
| 0.785
| 0.76
| 0.76
| 0.76
| 0.76
| 0.76
| 0
| 0.0145
| 0.252336
| 2,675
| 93
| 80
| 28.763441
| 0.7855
| 0
| 0
| 0.795181
| 0
| 0
| 0.168224
| 0
| 0
| 0
| 0
| 0
| 0.204819
| 1
| 0.108434
| false
| 0
| 0.024096
| 0
| 0.13253
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
dc011bc9eb61a80d2db7f985f3aaf8a17530826a
| 103
|
py
|
Python
|
neo_db/test_neo.py
|
liupuchun/KGQA-of-HongLouMeng
|
2d2a1192f7d2850fb306dbb948177370140a652d
|
[
"MIT"
] | 1
|
2020-06-03T08:07:37.000Z
|
2020-06-03T08:07:37.000Z
|
neo_db/test_neo.py
|
liupuchun/KGQA-of-HongLouMeng
|
2d2a1192f7d2850fb306dbb948177370140a652d
|
[
"MIT"
] | null | null | null |
neo_db/test_neo.py
|
liupuchun/KGQA-of-HongLouMeng
|
2d2a1192f7d2850fb306dbb948177370140a652d
|
[
"MIT"
] | null | null | null |
from query_graph import query_name, query_all
#name = input("name=")
#s = query_name(name)
query_all()
| 17.166667
| 44
| 0.747573
| 17
| 103
| 4.235294
| 0.470588
| 0.25
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116505
| 103
| 6
| 45
| 17.166667
| 0.791209
| 0.398058
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
909295fdaddb1cccdadc31d2af5beaaa554d3b0f
| 27
|
py
|
Python
|
d_serialize/__init__.py
|
Martlark/d_serialize
|
c4e4dfa35344a91d423abcf76d08557fee757afd
|
[
"MIT"
] | null | null | null |
d_serialize/__init__.py
|
Martlark/d_serialize
|
c4e4dfa35344a91d423abcf76d08557fee757afd
|
[
"MIT"
] | null | null | null |
d_serialize/__init__.py
|
Martlark/d_serialize
|
c4e4dfa35344a91d423abcf76d08557fee757afd
|
[
"MIT"
] | null | null | null |
from .d_serialize import *
| 13.5
| 26
| 0.777778
| 4
| 27
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.869565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
90a567c11c1581ac550720ac88fde1a1d5335d35
| 202
|
py
|
Python
|
imgapp/admin.py
|
rtice3/imgdb
|
9ef7a105632e31011324bec028005a7435ae052f
|
[
"MIT"
] | null | null | null |
imgapp/admin.py
|
rtice3/imgdb
|
9ef7a105632e31011324bec028005a7435ae052f
|
[
"MIT"
] | null | null | null |
imgapp/admin.py
|
rtice3/imgdb
|
9ef7a105632e31011324bec028005a7435ae052f
|
[
"MIT"
] | null | null | null |
from django.contrib import admin

from .models import ProcessedImg, UnprocessedImg

# Register your models here.
admin.site.register(UnprocessedImg)
admin.site.register(ProcessedImg)
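An equivalent registration using Django's @admin.register decorator, shown as a hedged alternative (the shared ModelAdmin subclass and its list_display are hypothetical, not part of the file):

from django.contrib import admin

from .models import ProcessedImg, UnprocessedImg

# One decorator call can register several models against the same ModelAdmin.
@admin.register(UnprocessedImg, ProcessedImg)
class ImgAdmin(admin.ModelAdmin):
    list_display = ('id',)  # placeholder column; adjust to the models' fields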
| 22.444444
| 35
| 0.831683
| 25
| 202
| 6.72
| 0.48
| 0.119048
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10396
| 202
| 9
| 36
| 22.444444
| 0.928177
| 0.128713
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
2912e89608bf3ee49f3fa6fb32a2696858bbf7de
| 6,951
|
py
|
Python
|
matrix-python-project/cover_generator/typesetting/model/three.py
|
hokaso/hocassian-media-matrix
|
2c2e5a4c72dfa43d2eed0f083f5b19238aea2765
|
[
"MIT"
] | 141
|
2021-06-27T03:18:54.000Z
|
2022-03-17T03:24:26.000Z
|
matrix-python-project/cover_generator/typesetting/model/three.py
|
hokaso/hocassian-media-matrix
|
2c2e5a4c72dfa43d2eed0f083f5b19238aea2765
|
[
"MIT"
] | 1
|
2021-08-06T17:35:01.000Z
|
2021-08-06T17:35:01.000Z
|
matrix-python-project/cover_generator/typesetting/model/three.py
|
hokaso/hocassian-media-matrix
|
2c2e5a4c72dfa43d2eed0f083f5b19238aea2765
|
[
"MIT"
] | 24
|
2021-06-29T01:58:59.000Z
|
2022-03-02T01:42:43.000Z
|
import sys, os, time, json, random
from PIL import Image, ImageDraw, ImageFont, ImageFilter
from cover_generator.typesetting.more import More
from cover_generator.typesetting.mark import Mark
from cover_generator.typesetting.build import Build
from utils.snow_id import SnowId
sys.path.append(os.getcwd())
class Three(object):
def __init__(self, folder_key):
self.image_list = None
self.rank_model = None
self.tb = None
with open("cover_generator/typesetting/style.json", 'r') as f0:
style_config = json.load(f0)
self.model = style_config["three"]
self.func_map = {
1: self.horizontal_build,
2: self.vertical_build,
3: self.triple_vertical_build,
4: self.triple_horizontal_build
}
self._build = Build(folder_key, folder_key + "_temp")
def horizontal(self, image_list):
return More(image_list, self.model[0]["unit_detail"], "31").main()
def vertical(self, image_list):
return More(image_list, self.model[1]["unit_detail"], "32").main()
def triple_vertical(self, image_list):
return More(image_list, self.model[2]["unit_detail"], "33").main()
def triple_horizontal(self, image_list):
return More(image_list, self.model[3]["unit_detail"], "34").main()
def build(self, image_list, model):
self.tb = Image.open("cover_generator/background.jpg")
self.image_list = image_list
self.rank_model = model
self.func_map[int(model["model_id"][1])]()
def horizontal_build(self):
        # Paste the first image
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[0]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
        # Paste the second image
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[0]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
        # Paste the third image
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[0]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
        # Randomly shuffle the images, which share the same width and height
pic_list = [pic_1, pic_2, pic_3]
random.shuffle(pic_list)
        # Save
self.tb.paste(pic_list[0], (0, 0))
self.tb.paste(pic_list[1], (0, 480))
self.tb.paste(pic_list[2], (0, 960))
self._build.save(self.tb)
def vertical_build(self):
        # Paste the first image
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[1]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
        # Paste the second image
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[1]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
        # Paste the third image
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[1]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
        # Randomly shuffle the images, which share the same width and height
pic_list = [pic_1, pic_2, pic_3]
random.shuffle(pic_list)
        # Save
self.tb.paste(pic_list[0], (0, 0))
self.tb.paste(pic_list[1], (360, 0))
self.tb.paste(pic_list[2], (720, 0))
self._build.save(self.tb)
def triple_vertical_build(self):
        # Paste the first image
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[2]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
        # Paste the second image
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[2]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
        # Paste the third image
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[2]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
        # Randomly shuffle the images, which share the same width and height
pic_list = [pic_1, pic_2]
random.shuffle(pic_list)
        # The layout needs to be shuffled as well
kind = random.randint(0, 1)
        # Save
if kind == 0:
self.tb.paste(pic_list[0], (0, 0))
self.tb.paste(pic_list[1], (0, 720))
self.tb.paste(pic_3, (540, 0))
else:
self.tb.paste(pic_list[0], (540, 0))
self.tb.paste(pic_list[1], (540, 720))
self.tb.paste(pic_3, (0, 0))
self._build.save(self.tb)
def triple_horizontal_build(self):
        # Paste the first image
image = self.image_list[self.rank_model["model_match"][0][1]]
model = self.model[3]["unit_detail"][0]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_1 = self._build.build_up(image["filename"], rate, area)
        # Paste the second image
image = self.image_list[self.rank_model["model_match"][1][1]]
model = self.model[3]["unit_detail"][1]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_2 = self._build.build_up(image["filename"], rate, area)
        # Paste the third image
image = self.image_list[self.rank_model["model_match"][2][1]]
model = self.model[3]["unit_detail"][2]
rate, area = Mark(image["width"], image["height"], model["width"], model["height"]).crop()
pic_3 = self._build.build_up(image["filename"], rate, area)
        # Randomly shuffle the images, which share the same width and height
pic_list = [pic_1, pic_2]
random.shuffle(pic_list)
        # The layout needs to be shuffled as well
kind = random.randint(0, 1)
        # Save
if kind == 0:
self.tb.paste(pic_list[0], (0, 0))
self.tb.paste(pic_list[1], (540, 0))
self.tb.paste(pic_3, (0, 720))
else:
self.tb.paste(pic_list[0], (0, 720))
self.tb.paste(pic_list[1], (540, 720))
self.tb.paste(pic_3, (0, 0))
self._build.save(self.tb)
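A hypothetical usage sketch (the dict shapes are inferred from the code above, not from project documentation): build() reads the filename/width/height keys from each image entry, model["model_id"][1] selects one of the four layouts in func_map, and the constructor additionally expects cover_generator/typesetting/style.json and cover_generator/background.jpg to exist on disk:

three = Three("my_folder")
images = [
    {"filename": "a.jpg", "width": 1080, "height": 720},
    {"filename": "b.jpg", "width": 1080, "height": 720},
    {"filename": "c.jpg", "width": 1080, "height": 720},
]
# "31" routes to horizontal_build; each model_match[i][1] indexes into images.
model = {"model_id": "31", "model_match": [(0, 0), (1, 1), (2, 2)]}
three.build(images, model)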
| 36.015544
| 98
| 0.592145
| 952
| 6,951
| 4.138655
| 0.09979
| 0.054822
| 0.06269
| 0.063959
| 0.81269
| 0.801523
| 0.768782
| 0.753807
| 0.743147
| 0.743147
| 0
| 0.033403
| 0.237664
| 6,951
| 192
| 99
| 36.203125
| 0.710134
| 0.02719
| 0
| 0.504065
| 0
| 0
| 0.113222
| 0.010091
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081301
| false
| 0
| 0.04878
| 0.03252
| 0.170732
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
295cf9e3843ea79bc5abd90a406e7a0075aa7d40
| 41
|
py
|
Python
|
auth/signals/__init__.py
|
sanjeevkumar12/flask-app-apispecs
|
c3ab260e2dd533f647224337fcbab6e8e22dba5b
|
[
"MIT"
] | null | null | null |
auth/signals/__init__.py
|
sanjeevkumar12/flask-app-apispecs
|
c3ab260e2dd533f647224337fcbab6e8e22dba5b
|
[
"MIT"
] | null | null | null |
auth/signals/__init__.py
|
sanjeevkumar12/flask-app-apispecs
|
c3ab260e2dd533f647224337fcbab6e8e22dba5b
|
[
"MIT"
] | null | null | null |
from .events import USER_REGISTER_SIGNAL
| 20.5
| 40
| 0.878049
| 6
| 41
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 41
| 1
| 41
| 41
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|