hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
37f6f46c248a75eb9e891880820a6072c1d2a444
| 52
|
py
|
Python
|
artifact-test/interop_tests/source_invalid/src/two-b.py
|
Josh-Thompson/artifact
|
b2eb8948eb3004aecdc292863be4c7d3b9b5a8c4
|
[
"Apache-2.0",
"MIT"
] | 530
|
2017-02-14T17:47:24.000Z
|
2022-03-09T18:45:09.000Z
|
artifact-test/interop_tests/source_invalid/src/two-b.py
|
eholk/artifact
|
ff4c72283aef12525727ca174c4be2876e5a5df6
|
[
"Apache-2.0",
"MIT"
] | 223
|
2017-02-14T15:58:35.000Z
|
2020-08-04T13:47:19.000Z
|
artifact-test/interop_tests/source_invalid/src/two-b.py
|
eholk/artifact
|
ff4c72283aef12525727ca174c4be2876e5a5df6
|
[
"Apache-2.0",
"MIT"
] | 51
|
2017-02-25T12:39:02.000Z
|
2022-02-28T06:35:25.000Z
|
# The other identical location
#SPC-two
#SPC-two.a
| 10.4
| 30
| 0.730769
| 9
| 52
| 4.222222
| 0.777778
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 52
| 4
| 31
| 13
| 0.863636
| 0.846154
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
37f956f62af149e1c8f42c81d2432237e90dc6bc
| 3,958
|
py
|
Python
|
Scrapy_zzuliacgn/pipelines.py
|
DeSireFire/zzuliacgnSyders
|
0e4d6b9663771d8ddc65598bae58a5b4b8c22e88
|
[
"MIT"
] | 2
|
2019-03-23T16:05:16.000Z
|
2021-04-19T02:14:09.000Z
|
Scrapy_zzuliacgn/pipelines.py
|
DeSireFire/zzuliacgnSyders
|
0e4d6b9663771d8ddc65598bae58a5b4b8c22e88
|
[
"MIT"
] | null | null | null |
Scrapy_zzuliacgn/pipelines.py
|
DeSireFire/zzuliacgnSyders
|
0e4d6b9663771d8ddc65598bae58a5b4b8c22e88
|
[
"MIT"
] | 1
|
2020-10-11T15:33:31.000Z
|
2020-10-11T15:33:31.000Z
|
# -*- coding: utf-8 -*-
import json,pymysql
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class mysqlPipeline(object):
pass
# class mysqlPipeline(object):
# def __init__(self, host, database, user, password, port):
# self.host = host
# self.database = database
# self.user = user
# self.password = password
# self.port = port
#
# @classmethod
# def from_crawler(cls, crawler):
# return cls(
# host=crawler.settings.get('MYSQL_HOST'),
# database=crawler.settings.get('MYSQL_DATABASE'),
# user=crawler.settings.get('MYSQL_USER'),
# password=crawler.settings.get('MYSQL_PASSWORD'),
# port=crawler.settings.get('MYSQL_PORT'),
# )
#
# def open_spider(self,spider):
# self.db = pymysql.connect(self.host,self.user,self.password,self.database,self.port,charset='utf8',)
# self.cursor = self.db.cursor()
#
# def close_spider(self,spider):
# self.db.close()
#
# def process_item(self,item,spider):
# # return item
# # 如果爬虫名是movie
# if 'dmhy' in spider.name :
# print('老子是dmhy的管道,我感受到了力量')
# # print(item)
# # print(type(item))
# self.mysql_insert_update(item,'ZA_BT_items')
# elif spider.name == 'nyaa':
# print('老子是nyaa的管道,我感受到了力量')
# self.mysql_insert_IGNORE(item, 'ZA_BT_items')
# elif spider.name == 'wenku8':
# print('老子是wenku8的管道,我感受到了力量')
# if 'writer' in dict(item).keys():
# print('是小说主表')
# # todo 似乎不适合使用更新插入,待定
# self.mysql_insert_update(item, 'ZA_Novel_info')
# else:
# print('是小说章节表')
# self.mysql_insert_update(item, 'ZA_Novel_detail')
# else:
# print("我是谁,我在哪,我在做什么")
# return item
#
# def mysql_insert_update(self,item,tableName):
# '''
# 针对mysql复用的管道函数,存在就进行更新,不存在则插入新条目
# 注意: 数据库表中,必须存在有唯一约束的字段
# :param item:框架传递过来的item
# :param tableName:要存储到的表名
# :return:
# '''
# data = dict(item)
# # print(type(data))
# mykeys = ",".join(data.keys())
# myvalues = ",".join(['%s'] * len(data))
# myUpdate = ",".join([" {key} = %s".format(key=key) for key in data])+ ";"
# sql = "INSERT INTO {table}({keys}) VALUES ({values}) ON DUPLICATE KEY UPDATE".format(table=tableName,keys=mykeys,values=myvalues)
# # sql = "alter table {table} AUTO_INCREMENT=1;INSERT INTO {table}({keys}) VALUES ({values}) ON DUPLICATE KEY UPDATE".format(table=tableName,keys=mykeys,values=myvalues)
# sql += myUpdate
# try:
# if self.cursor.execute(sql, tuple(data.values()) * 2):
# print("更新成功!")
# self.db.commit()
# except Exception as e:
# print("更新数据 时发生错误:%s" % e)
#
# def mysql_insert_IGNORE(self,item,tableName):
# '''
# 针对mysql复用的管道函数,存在就进行更新,不存在则插入新条目
# 注意: 数据库表中,必须存在有唯一约束的字段
# :param item:框架传递过来的item
# :param tableName:要存储到的表名
# :return:
# '''
# data = dict(item)
# # # print(type(data))
# mykeys = ",".join(data.keys())
# myvalues = ",".join(['%s'] * len(data))
# # myUpdate = ",".join([" {key} = %s".format(key=key) for key in data])
# sql = "INSERT IGNORE INTO {table}({keys}) VALUES ({values})".format(table=tableName, keys=mykeys,values=myvalues)
# try:
# if self.cursor.execute(sql, tuple(data.values())):
# print("忽略中出成功!")
# self.cursor.commit()
# except Exception as e:
# print("忽略以存在数据插入 时发生错误:%s" % e)
# self.cursor.rollback()
| 38.057692
| 178
| 0.549267
| 426
| 3,958
| 5.021127
| 0.321596
| 0.030856
| 0.042076
| 0.053763
| 0.481533
| 0.449275
| 0.412342
| 0.336606
| 0.336606
| 0.299205
| 0
| 0.002156
| 0.296867
| 3,958
| 104
| 179
| 38.057692
| 0.766439
| 0.932542
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009615
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
530728b7ff97d99513c1dd7c8b2516d3b3f2db94
| 1,887
|
py
|
Python
|
tests/database/test_tables.py
|
NOWUM/EnSysMod
|
18c8a2198db3510e667c1f0298d00a3dfcb0aab7
|
[
"MIT"
] | 1
|
2021-12-10T19:41:01.000Z
|
2021-12-10T19:41:01.000Z
|
tests/database/test_tables.py
|
NOWUM/EnSysMod
|
18c8a2198db3510e667c1f0298d00a3dfcb0aab7
|
[
"MIT"
] | 83
|
2021-10-20T22:54:28.000Z
|
2022-03-24T19:07:06.000Z
|
tests/database/test_tables.py
|
NOWUM/EnSysMod
|
18c8a2198db3510e667c1f0298d00a3dfcb0aab7
|
[
"MIT"
] | null | null | null |
from sqlalchemy.orm import Session
def check_table_exists(cursor, table_name):
cursor.execute(f"SELECT * FROM sqlite_master WHERE type='table' AND name='{table_name}'")
return len(cursor.fetchall()) == 1
def test_table_user(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'user')
def test_table_energy_conversion(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'energy_conversion')
def test_table_energy_conversion_factor(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'energy_conversion_factor')
def test_table_energy_sink(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'energy_sink')
def test_table_energy_source(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'energy_source')
def test_table_energy_storage(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'energy_storage')
def test_table_energy_transmission(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'energy_transmission')
def test_table_capacity_fix(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'capacity_fix')
def test_table_capacity_max(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'capacity_max')
def test_table_operation_rate_fix(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'operation_rate_fix')
def test_table_operation_rate_max(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'operation_rate_max')
def test_table_region(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'region')
def test_table_energy_model(db: Session):
assert check_table_exists(db.bind.raw_connection().cursor(), 'energy_model')
| 31.983051
| 93
| 0.777954
| 267
| 1,887
| 5.131086
| 0.164794
| 0.10219
| 0.163504
| 0.189781
| 0.693431
| 0.616058
| 0.616058
| 0.616058
| 0.616058
| 0.616058
| 0
| 0.000588
| 0.098039
| 1,887
| 58
| 94
| 32.534483
| 0.804348
| 0
| 0
| 0
| 0
| 0
| 0.132485
| 0.012719
| 0
| 0
| 0
| 0
| 0.433333
| 1
| 0.466667
| false
| 0
| 0.033333
| 0
| 0.533333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
5327daed0c59a58dc66f8e2ccd081b87524bbb97
| 22
|
py
|
Python
|
members/views/userrole/__init__.py
|
louking/members
|
ee204211812e00945f9e2b09cfa130cc9d3e6558
|
[
"Apache-2.0"
] | 1
|
2020-12-07T02:52:01.000Z
|
2020-12-07T02:52:01.000Z
|
members/views/userrole/__init__.py
|
louking/members
|
ee204211812e00945f9e2b09cfa130cc9d3e6558
|
[
"Apache-2.0"
] | 496
|
2020-02-12T15:48:26.000Z
|
2022-03-23T11:17:27.000Z
|
members/views/userrole/__init__.py
|
louking/members
|
ee204211812e00945f9e2b09cfa130cc9d3e6558
|
[
"Apache-2.0"
] | null | null | null |
from . import userrole
| 22
| 22
| 0.818182
| 3
| 22
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 22
| 1
| 22
| 22
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
534ee2e7ee34f2bbf750f6437a55425c6775a294
| 1,011
|
py
|
Python
|
python/notification.py
|
HAYASAKA-Ryosuke/AdafruitLEDMatrixI2C
|
121b9a5b113ac2ab706471827664bd0aea43a0f1
|
[
"MIT"
] | null | null | null |
python/notification.py
|
HAYASAKA-Ryosuke/AdafruitLEDMatrixI2C
|
121b9a5b113ac2ab706471827664bd0aea43a0f1
|
[
"MIT"
] | null | null | null |
python/notification.py
|
HAYASAKA-Ryosuke/AdafruitLEDMatrixI2C
|
121b9a5b113ac2ab706471827664bd0aea43a0f1
|
[
"MIT"
] | null | null | null |
#!coding:utf-8
import serial
import time
def smile():
res = [
0, 0, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0,
1, 0, 1, 0, 0, 1, 0, 1,
1, 0, 0, 0, 0, 0, 0, 1,
1, 0, 1, 0, 0, 1, 0, 1,
1, 0, 0, 1, 1, 0, 0, 1,
0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 0, 0,
]
return res
def clear_window():
res = [
0, 0, 1, 1, 1, 1, 0, 0,
0, 1, 0, 0, 0, 0, 1, 0,
1, 0, 0, 0, 0, 0, 0, 1,
1, 0, 0, 0, 0, 0, 0, 1,
1, 0, 0, 0, 0, 0, 0, 1,
1, 0, 0, 0, 0, 0, 0, 1,
0, 1, 0, 0, 0, 0, 1, 0,
0, 0, 1, 1, 1, 1, 0, 0,
]
return res
if __name__ == '__main__':
ser = serial.Serial(
port='/dev/tty.usbmodem1411',
baudrate=38400,
)
print("smile")
ser.write(smile())
ser.close()
time.sleep(2)
ser = serial.Serial(
port='/dev/tty.usbmodem1411',
baudrate=38400,
)
ser.write(clear_window())
ser.close()
print("clear")
| 20.632653
| 37
| 0.399604
| 185
| 1,011
| 2.12973
| 0.156757
| 0.269036
| 0.243655
| 0.192893
| 0.639594
| 0.639594
| 0.634518
| 0.634518
| 0.634518
| 0.380711
| 0
| 0.244224
| 0.400593
| 1,011
| 48
| 38
| 21.0625
| 0.405941
| 0.012859
| 0
| 0.642857
| 0
| 0
| 0.060181
| 0.042126
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.047619
| 0
| 0.142857
| 0.047619
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5351146884ce3aef9de739c0cd3d856362e1c53a
| 85
|
py
|
Python
|
tests/assets/check_mimetypes.py
|
baturayo/dploy-kickstart
|
2f58a780241032cfaacfa91f1d834db1c91c7abb
|
[
"MIT"
] | 6
|
2020-05-20T11:56:42.000Z
|
2020-11-03T16:24:36.000Z
|
tests/assets/check_mimetypes.py
|
baturayo/dploy-kickstart
|
2f58a780241032cfaacfa91f1d834db1c91c7abb
|
[
"MIT"
] | 9
|
2020-06-02T15:03:42.000Z
|
2020-11-12T11:55:48.000Z
|
tests/assets/check_mimetypes.py
|
baturayo/dploy-kickstart
|
2f58a780241032cfaacfa91f1d834db1c91c7abb
|
[
"MIT"
] | 3
|
2020-09-10T13:38:02.000Z
|
2020-10-01T16:36:48.000Z
|
# @dploy endpoint xyz
# @dploy json_to_kwargs
def xyz(a, b, c):
return a + b + c
| 17
| 23
| 0.623529
| 16
| 85
| 3.1875
| 0.6875
| 0.078431
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.247059
| 85
| 4
| 24
| 21.25
| 0.796875
| 0.482353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
725d218473c167231c5eba6b001efcb7b36c5985
| 915
|
py
|
Python
|
src/compas_rhino/geometry/curves/__init__.py
|
lottilotte/compas
|
7a1d67d2664e21cf6cf255b68e4f207439e63f6b
|
[
"MIT"
] | 235
|
2017-11-07T07:33:22.000Z
|
2022-03-25T16:20:00.000Z
|
src/compas_rhino/geometry/curves/__init__.py
|
lottilotte/compas
|
7a1d67d2664e21cf6cf255b68e4f207439e63f6b
|
[
"MIT"
] | 770
|
2017-09-22T13:42:06.000Z
|
2022-03-31T21:26:45.000Z
|
src/compas_rhino/geometry/curves/__init__.py
|
lottilotte/compas
|
7a1d67d2664e21cf6cf255b68e4f207439e63f6b
|
[
"MIT"
] | 99
|
2017-11-06T23:15:28.000Z
|
2022-03-25T16:05:36.000Z
|
from .nurbs import RhinoNurbsCurve
from compas.geometry import NurbsCurve
from compas.plugins import plugin
@plugin(category='factories', requires=['Rhino'])
def new_nurbscurve(*args, **kwargs):
return super(NurbsCurve, RhinoNurbsCurve).__new__(RhinoNurbsCurve)
@plugin(category='factories', requires=['Rhino'])
def new_nurbscurve_from_parameters(*args, **kwargs):
return RhinoNurbsCurve.from_parameters(*args, **kwargs)
@plugin(category='factories', requires=['Rhino'])
def new_nurbscurve_from_points(*args, **kwargs):
return RhinoNurbsCurve.from_points(*args, **kwargs)
@plugin(category='factories', requires=['Rhino'])
def new_nurbscurve_from_interpolation(*args, **kwargs):
return RhinoNurbsCurve.from_interpolation(*args, **kwargs)
@plugin(category='factories', requires=['Rhino'])
def new_nurbscurve_from_step(*args, **kwargs):
return RhinoNurbsCurve.from_step(*args, **kwargs)
| 30.5
| 70
| 0.765027
| 102
| 915
| 6.656863
| 0.22549
| 0.132548
| 0.169367
| 0.228277
| 0.656848
| 0.450663
| 0.450663
| 0.450663
| 0.37408
| 0.291605
| 0
| 0
| 0.09071
| 915
| 29
| 71
| 31.551724
| 0.816106
| 0
| 0
| 0.277778
| 0
| 0
| 0.076503
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.277778
| true
| 0
| 0.166667
| 0.277778
| 0.722222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
726cd61ea12182a115024383da76f20bcee44d05
| 129
|
py
|
Python
|
FaceSwap-master/pytorch_stylegan_encoder/InterFaceGAN/models/pggan_tf_official/metrics/ms_ssim.py
|
CSID-DGU/-2020-1-OSSP1-ninetynine-2
|
b1824254882eeea0ee44e4e60896b72c51ef1d2c
|
[
"MIT"
] | 1
|
2020-06-21T13:45:26.000Z
|
2020-06-21T13:45:26.000Z
|
FaceSwap-master/pytorch_stylegan_encoder/InterFaceGAN/models/pggan_tf_official/metrics/ms_ssim.py
|
CSID-DGU/-2020-1-OSSP1-ninetynine-2
|
b1824254882eeea0ee44e4e60896b72c51ef1d2c
|
[
"MIT"
] | null | null | null |
FaceSwap-master/pytorch_stylegan_encoder/InterFaceGAN/models/pggan_tf_official/metrics/ms_ssim.py
|
CSID-DGU/-2020-1-OSSP1-ninetynine-2
|
b1824254882eeea0ee44e4e60896b72c51ef1d2c
|
[
"MIT"
] | 3
|
2020-09-02T03:18:45.000Z
|
2021-01-27T08:24:05.000Z
|
version https://git-lfs.github.com/spec/v1
oid sha256:0f17897e2d15324d0714486bea21390f09109d33844a15a7dc9ebd4fd57ad74b
size 8160
| 32.25
| 75
| 0.883721
| 13
| 129
| 8.769231
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.414634
| 0.046512
| 129
| 3
| 76
| 43
| 0.512195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
727d416601086e9ef4c6e28c525af91066a1cf5f
| 357
|
py
|
Python
|
test/utils.py
|
medialab/pelote
|
cef80daeb19ef2fef73f8a1fcfc8477aa11bfb9a
|
[
"MIT"
] | 2
|
2022-03-07T20:00:10.000Z
|
2022-03-21T12:36:58.000Z
|
test/utils.py
|
medialab/pelote
|
cef80daeb19ef2fef73f8a1fcfc8477aa11bfb9a
|
[
"MIT"
] | 55
|
2022-03-02T16:19:30.000Z
|
2022-03-31T12:44:05.000Z
|
test/utils.py
|
medialab/pelote
|
cef80daeb19ef2fef73f8a1fcfc8477aa11bfb9a
|
[
"MIT"
] | null | null | null |
# =============================================================================
# Pelote Unit Test Utilities
# =============================================================================
from os.path import join, dirname
RESOURCES_DIR = join(dirname(__file__), "resources")
def get_resource_path(name: str) -> str:
return join(RESOURCES_DIR, name)
| 32.454545
| 79
| 0.414566
| 28
| 357
| 5
| 0.678571
| 0.157143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092437
| 357
| 10
| 80
| 35.7
| 0.432099
| 0.509804
| 0
| 0
| 0
| 0
| 0.052632
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
72872e163c7fa6d20ac7c178484f85fdf2f13aa8
| 20,916
|
py
|
Python
|
src/python/grpcio/grpc/_simple_stubs.py
|
uecasm/grpc
|
3ed237d4bb2ea66d1e9811b23c1a5ec860a4efe4
|
[
"Apache-2.0"
] | 1
|
2021-02-21T15:17:24.000Z
|
2021-02-21T15:17:24.000Z
|
src/python/grpcio/grpc/_simple_stubs.py
|
uecasm/grpc
|
3ed237d4bb2ea66d1e9811b23c1a5ec860a4efe4
|
[
"Apache-2.0"
] | null | null | null |
src/python/grpcio/grpc/_simple_stubs.py
|
uecasm/grpc
|
3ed237d4bb2ea66d1e9811b23c1a5ec860a4efe4
|
[
"Apache-2.0"
] | 1
|
2020-08-27T06:13:27.000Z
|
2020-08-27T06:13:27.000Z
|
# Copyright 2020 The gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions that obviate explicit stubs and explicit channels."""
import collections
import datetime
import os
import logging
import threading
from typing import (Any, AnyStr, Callable, Dict, Iterator, Optional, Sequence,
Tuple, TypeVar, Union)
import grpc
from grpc.experimental import experimental_api
RequestType = TypeVar('RequestType')
ResponseType = TypeVar('ResponseType')
OptionsType = Sequence[Tuple[str, str]]
CacheKey = Tuple[str, OptionsType, Optional[grpc.ChannelCredentials], Optional[
grpc.Compression]]
_LOGGER = logging.getLogger(__name__)
_EVICTION_PERIOD_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS"
if _EVICTION_PERIOD_KEY in os.environ:
_EVICTION_PERIOD = datetime.timedelta(
seconds=float(os.environ[_EVICTION_PERIOD_KEY]))
_LOGGER.debug("Setting managed channel eviction period to %s",
_EVICTION_PERIOD)
else:
_EVICTION_PERIOD = datetime.timedelta(minutes=10)
_MAXIMUM_CHANNELS_KEY = "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM"
if _MAXIMUM_CHANNELS_KEY in os.environ:
_MAXIMUM_CHANNELS = int(os.environ[_MAXIMUM_CHANNELS_KEY])
_LOGGER.debug("Setting maximum managed channels to %d", _MAXIMUM_CHANNELS)
else:
_MAXIMUM_CHANNELS = 2**8
def _create_channel(target: str, options: Sequence[Tuple[str, str]],
channel_credentials: Optional[grpc.ChannelCredentials],
compression: Optional[grpc.Compression]) -> grpc.Channel:
channel_credentials = channel_credentials or grpc.local_channel_credentials(
)
if channel_credentials._credentials is grpc.experimental._insecure_channel_credentials:
_LOGGER.debug(f"Creating insecure channel with options '{options}' " +
f"and compression '{compression}'")
return grpc.insecure_channel(target,
options=options,
compression=compression)
else:
_LOGGER.debug(
f"Creating secure channel with credentials '{channel_credentials}', "
+ f"options '{options}' and compression '{compression}'")
return grpc.secure_channel(target,
credentials=channel_credentials,
options=options,
compression=compression)
class ChannelCache:
# NOTE(rbellevi): Untyped due to reference cycle.
_singleton = None
_lock: threading.RLock = threading.RLock()
_condition: threading.Condition = threading.Condition(lock=_lock)
_eviction_ready: threading.Event = threading.Event()
_mapping: Dict[CacheKey, Tuple[grpc.Channel, datetime.datetime]]
_eviction_thread: threading.Thread
def __init__(self):
self._mapping = collections.OrderedDict()
self._eviction_thread = threading.Thread(
target=ChannelCache._perform_evictions, daemon=True)
self._eviction_thread.start()
@staticmethod
def get():
with ChannelCache._lock:
if ChannelCache._singleton is None:
ChannelCache._singleton = ChannelCache()
ChannelCache._eviction_ready.wait()
return ChannelCache._singleton
def _evict_locked(self, key: CacheKey):
channel, _ = self._mapping.pop(key)
_LOGGER.debug("Evicting channel %s with configuration %s.", channel,
key)
channel.close()
del channel
@staticmethod
def _perform_evictions():
while True:
with ChannelCache._lock:
ChannelCache._eviction_ready.set()
if not ChannelCache._singleton._mapping:
ChannelCache._condition.wait()
elif len(ChannelCache._singleton._mapping) > _MAXIMUM_CHANNELS:
key = next(iter(ChannelCache._singleton._mapping.keys()))
ChannelCache._singleton._evict_locked(key)
# And immediately reevaluate.
else:
key, (_, eviction_time) = next(
iter(ChannelCache._singleton._mapping.items()))
now = datetime.datetime.now()
if eviction_time <= now:
ChannelCache._singleton._evict_locked(key)
continue
else:
time_to_eviction = (eviction_time - now).total_seconds()
# NOTE: We aim to *eventually* coalesce to a state in
# which no overdue channels are in the cache and the
# length of the cache is longer than _MAXIMUM_CHANNELS.
# We tolerate momentary states in which these two
# criteria are not met.
ChannelCache._condition.wait(timeout=time_to_eviction)
def get_channel(self, target: str, options: Sequence[Tuple[str, str]],
channel_credentials: Optional[grpc.ChannelCredentials],
compression: Optional[grpc.Compression]) -> grpc.Channel:
key = (target, options, channel_credentials, compression)
with self._lock:
channel_data = self._mapping.get(key, None)
if channel_data is not None:
channel = channel_data[0]
self._mapping.pop(key)
self._mapping[key] = (channel, datetime.datetime.now() +
_EVICTION_PERIOD)
return channel
else:
channel = _create_channel(target, options, channel_credentials,
compression)
self._mapping[key] = (channel, datetime.datetime.now() +
_EVICTION_PERIOD)
if len(self._mapping) == 1 or len(
self._mapping) >= _MAXIMUM_CHANNELS:
self._condition.notify()
return channel
def _test_only_channel_count(self) -> int:
with self._lock:
return len(self._mapping)
# TODO(rbellevi): Consider a credential type that has the
# following functionality matrix:
#
# +----------+-------+--------+
# | | local | remote |
# |----------+-------+--------+
# | secure | o | o |
# | insecure | o | x |
# +----------+-------+--------+
#
# Make this the default option.
@experimental_api
def unary_unary(
request: RequestType,
target: str,
method: str,
request_serializer: Optional[Callable[[Any], bytes]] = None,
request_deserializer: Optional[Callable[[bytes], Any]] = None,
options: Sequence[Tuple[AnyStr, AnyStr]] = (),
channel_credentials: Optional[grpc.ChannelCredentials] = None,
call_credentials: Optional[grpc.CallCredentials] = None,
compression: Optional[grpc.Compression] = None,
wait_for_ready: Optional[bool] = None,
timeout: Optional[float] = None,
metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None
) -> ResponseType:
"""Invokes a unary-unary RPC without an explicitly specified channel.
THIS IS AN EXPERIMENTAL API.
This is backed by a per-process cache of channels. Channels are evicted
from the cache after a fixed period by a background. Channels will also be
evicted if more than a configured maximum accumulate.
The default eviction period is 10 minutes. One may set the environment
variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.
The default maximum number of channels is 256. One may set the
environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
this.
Args:
request: An iterator that yields request values for the RPC.
target: The server address.
method: The name of the RPC method.
request_serializer: Optional behaviour for serializing the request
message. Request goes unserialized in case None is passed.
response_deserializer: Optional behaviour for deserializing the response
message. Response goes undeserialized in case None is passed.
options: An optional list of key-value pairs (channel args in gRPC Core
runtime) to configure the channel.
channel_credentials: A credential applied to the whole channel, e.g. the
return value of grpc.ssl_channel_credentials() or
grpc.insecure_channel_credentials().
call_credentials: A call credential applied to each call individually,
e.g. the output of grpc.metadata_call_credentials() or
grpc.access_token_call_credentials().
compression: An optional value indicating the compression method to be
used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
wait_for_ready: An optional flag indicating whether the RPC should fail
immediately if the connection is not ready at the time the RPC is
invoked, or if it should wait until the connection to the server
becomes ready. When using this option, the user will likely also want
to set a timeout. Defaults to False.
timeout: An optional duration of time in seconds to allow for the RPC,
after which an exception will be raised.
metadata: Optional metadata to send to the server.
Returns:
The response to the RPC.
"""
channel = ChannelCache.get().get_channel(target, options,
channel_credentials, compression)
multicallable = channel.unary_unary(method, request_serializer,
request_deserializer)
return multicallable(request,
metadata=metadata,
wait_for_ready=wait_for_ready,
credentials=call_credentials,
timeout=timeout)
@experimental_api
def unary_stream(
        request: RequestType,
        target: str,
        method: str,
        request_serializer: Optional[Callable[[Any], bytes]] = None,
        request_deserializer: Optional[Callable[[bytes], Any]] = None,
        options: Sequence[Tuple[AnyStr, AnyStr]] = (),
        channel_credentials: Optional[grpc.ChannelCredentials] = None,
        call_credentials: Optional[grpc.CallCredentials] = None,
        compression: Optional[grpc.Compression] = None,
        wait_for_ready: Optional[bool] = None,
        timeout: Optional[float] = None,
        metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None
) -> Iterator[ResponseType]:
    """Invokes a unary-stream RPC without an explicitly specified channel.

    THIS IS AN EXPERIMENTAL API.

    This is backed by a per-process cache of channels. Channels are evicted
    from the cache after a fixed period by a background thread. Channels will
    also be evicted if more than a configured maximum accumulate.

    The default eviction period is 10 minutes. One may set the environment
    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.

    The default maximum number of channels is 256. One may set the
    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
    this.

    Args:
      request: The request value for the RPC.
      target: The server address.
      method: The name of the RPC method.
      request_serializer: Optional behaviour for serializing the request
        message. Request goes unserialized in case None is passed.
      request_deserializer: Optional behaviour for deserializing the response
        message. Response goes undeserialized in case None is passed.
        NOTE(review): despite the name, this callable is installed as the
        *response* deserializer on the underlying multicallable (see the
        channel.unary_stream call below).
      options: An optional list of key-value pairs (channel args in gRPC Core
        runtime) to configure the channel.
      channel_credentials: A credential applied to the whole channel, e.g. the
        return value of grpc.ssl_channel_credentials().
      call_credentials: A call credential applied to each call individually,
        e.g. the output of grpc.metadata_call_credentials() or
        grpc.access_token_call_credentials().
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
      wait_for_ready: An optional flag indicating whether the RPC should fail
        immediately if the connection is not ready at the time the RPC is
        invoked, or if it should wait until the connection to the server
        becomes ready. When using this option, the user will likely also want
        to set a timeout. Defaults to False.
      timeout: An optional duration of time in seconds to allow for the RPC,
        after which an exception will be raised.
      metadata: Optional metadata to send to the server.

    Returns:
      An iterator of responses.
    """
    channel = ChannelCache.get().get_channel(target, options,
                                             channel_credentials, compression)
    multicallable = channel.unary_stream(method, request_serializer,
                                         request_deserializer)
    return multicallable(request,
                         metadata=metadata,
                         wait_for_ready=wait_for_ready,
                         credentials=call_credentials,
                         timeout=timeout)
@experimental_api
def stream_unary(
        request_iterator: Iterator[RequestType],
        target: str,
        method: str,
        request_serializer: Optional[Callable[[Any], bytes]] = None,
        request_deserializer: Optional[Callable[[bytes], Any]] = None,
        options: Sequence[Tuple[AnyStr, AnyStr]] = (),
        channel_credentials: Optional[grpc.ChannelCredentials] = None,
        call_credentials: Optional[grpc.CallCredentials] = None,
        compression: Optional[grpc.Compression] = None,
        wait_for_ready: Optional[bool] = None,
        timeout: Optional[float] = None,
        metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None
) -> ResponseType:
    """Invokes a stream-unary RPC without an explicitly specified channel.

    THIS IS AN EXPERIMENTAL API.

    This is backed by a per-process cache of channels. Channels are evicted
    from the cache after a fixed period by a background thread. Channels will
    also be evicted if more than a configured maximum accumulate.

    The default eviction period is 10 minutes. One may set the environment
    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.

    The default maximum number of channels is 256. One may set the
    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
    this.

    Args:
      request_iterator: An iterator that yields request values for the RPC.
      target: The server address.
      method: The name of the RPC method.
      request_serializer: Optional behaviour for serializing the request
        message. Request goes unserialized in case None is passed.
      request_deserializer: Optional behaviour for deserializing the response
        message. Response goes undeserialized in case None is passed.
        NOTE(review): despite the name, this callable is installed as the
        *response* deserializer on the underlying multicallable (see the
        channel.stream_unary call below).
      options: An optional list of key-value pairs (channel args in gRPC Core
        runtime) to configure the channel.
      channel_credentials: A credential applied to the whole channel, e.g. the
        return value of grpc.ssl_channel_credentials().
      call_credentials: A call credential applied to each call individually,
        e.g. the output of grpc.metadata_call_credentials() or
        grpc.access_token_call_credentials().
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
      wait_for_ready: An optional flag indicating whether the RPC should fail
        immediately if the connection is not ready at the time the RPC is
        invoked, or if it should wait until the connection to the server
        becomes ready. When using this option, the user will likely also want
        to set a timeout. Defaults to False.
      timeout: An optional duration of time in seconds to allow for the RPC,
        after which an exception will be raised.
      metadata: Optional metadata to send to the server.

    Returns:
      The response to the RPC.
    """
    channel = ChannelCache.get().get_channel(target, options,
                                             channel_credentials, compression)
    multicallable = channel.stream_unary(method, request_serializer,
                                         request_deserializer)
    return multicallable(request_iterator,
                         metadata=metadata,
                         wait_for_ready=wait_for_ready,
                         credentials=call_credentials,
                         timeout=timeout)
@experimental_api
def stream_stream(
        request_iterator: Iterator[RequestType],
        target: str,
        method: str,
        request_serializer: Optional[Callable[[Any], bytes]] = None,
        request_deserializer: Optional[Callable[[bytes], Any]] = None,
        options: Sequence[Tuple[AnyStr, AnyStr]] = (),
        channel_credentials: Optional[grpc.ChannelCredentials] = None,
        call_credentials: Optional[grpc.CallCredentials] = None,
        compression: Optional[grpc.Compression] = None,
        wait_for_ready: Optional[bool] = None,
        timeout: Optional[float] = None,
        metadata: Optional[Sequence[Tuple[str, Union[str, bytes]]]] = None
) -> Iterator[ResponseType]:
    """Invokes a stream-stream RPC without an explicitly specified channel.

    THIS IS AN EXPERIMENTAL API.

    This is backed by a per-process cache of channels. Channels are evicted
    from the cache after a fixed period by a background thread. Channels will
    also be evicted if more than a configured maximum accumulate.

    The default eviction period is 10 minutes. One may set the environment
    variable "GRPC_PYTHON_MANAGED_CHANNEL_EVICTION_SECONDS" to configure this.

    The default maximum number of channels is 256. One may set the
    environment variable "GRPC_PYTHON_MANAGED_CHANNEL_MAXIMUM" to configure
    this.

    Args:
      request_iterator: An iterator that yields request values for the RPC.
      target: The server address.
      method: The name of the RPC method.
      request_serializer: Optional behaviour for serializing the request
        message. Request goes unserialized in case None is passed.
      request_deserializer: Optional behaviour for deserializing the response
        message. Response goes undeserialized in case None is passed.
        NOTE(review): despite the name, this callable is installed as the
        *response* deserializer on the underlying multicallable (see the
        channel.stream_stream call below).
      options: An optional list of key-value pairs (channel args in gRPC Core
        runtime) to configure the channel.
      channel_credentials: A credential applied to the whole channel, e.g. the
        return value of grpc.ssl_channel_credentials().
      call_credentials: A call credential applied to each call individually,
        e.g. the output of grpc.metadata_call_credentials() or
        grpc.access_token_call_credentials().
      compression: An optional value indicating the compression method to be
        used over the lifetime of the channel, e.g. grpc.Compression.Gzip.
      wait_for_ready: An optional flag indicating whether the RPC should fail
        immediately if the connection is not ready at the time the RPC is
        invoked, or if it should wait until the connection to the server
        becomes ready. When using this option, the user will likely also want
        to set a timeout. Defaults to False.
      timeout: An optional duration of time in seconds to allow for the RPC,
        after which an exception will be raised.
      metadata: Optional metadata to send to the server.

    Returns:
      An iterator of responses.
    """
    channel = ChannelCache.get().get_channel(target, options,
                                             channel_credentials, compression)
    multicallable = channel.stream_stream(method, request_serializer,
                                          request_deserializer)
    return multicallable(request_iterator,
                         metadata=metadata,
                         wait_for_ready=wait_for_ready,
                         credentials=call_credentials,
                         timeout=timeout)
| 46.37694
| 91
| 0.663224
| 2,415
| 20,916
| 5.608696
| 0.129607
| 0.037209
| 0.014175
| 0.017719
| 0.742193
| 0.720709
| 0.714212
| 0.714212
| 0.714212
| 0.704614
| 0
| 0.002228
| 0.270463
| 20,916
| 450
| 92
| 46.48
| 0.885445
| 0.462373
| 0
| 0.549296
| 0
| 0
| 0.04023
| 0.009727
| 0
| 0
| 0
| 0.002222
| 0
| 1
| 0.051643
| false
| 0
| 0.037559
| 0
| 0.169014
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7298649b5d0c773b1a929c49948b4baa13a5dded
| 137
|
py
|
Python
|
mmseg/models/generator_head/__init__.py
|
jacksonhzx95/Joint_segmentation_denoise_for_scoliosis
|
7b3b850452687601ae58d5cee6fc2324f420fda9
|
[
"Apache-2.0"
] | 2
|
2022-02-05T00:57:24.000Z
|
2022-02-14T08:56:34.000Z
|
mmseg/models/generator_head/__init__.py
|
jacksonhzx95/Joint_segmentation_denoise_for_scoliosis
|
7b3b850452687601ae58d5cee6fc2324f420fda9
|
[
"Apache-2.0"
] | null | null | null |
mmseg/models/generator_head/__init__.py
|
jacksonhzx95/Joint_segmentation_denoise_for_scoliosis
|
7b3b850452687601ae58d5cee6fc2324f420fda9
|
[
"Apache-2.0"
] | 1
|
2021-10-10T08:38:25.000Z
|
2021-10-10T08:38:25.000Z
|
# Package interface: re-export the generator-head classes from their submodules.
from .base import BaseGHead, GHead_no_in
from .single_conv_head import SingleGHead

# Names exported via `from mmseg.models.generator_head import *`.
__all__ = ['BaseGHead', 'SingleGHead', 'GHead_no_in']
| 34.25
| 53
| 0.79562
| 19
| 137
| 5.210526
| 0.631579
| 0.141414
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10219
| 137
| 4
| 53
| 34.25
| 0.804878
| 0
| 0
| 0
| 0
| 0
| 0.224638
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
729bf62f05b986a1bd0e2ef86b604c02857cdc04
| 211
|
py
|
Python
|
reddit2telegram/channels/r_movieclub/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 187
|
2016-09-20T09:15:54.000Z
|
2022-03-29T12:22:33.000Z
|
reddit2telegram/channels/r_movieclub/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 84
|
2016-09-22T14:25:07.000Z
|
2022-03-19T01:26:17.000Z
|
reddit2telegram/channels/r_movieclub/app.py
|
mainyordle/reddit2telegram
|
1163e15aed3b6ff0fba65b222d3d9798f644c386
|
[
"MIT"
] | 172
|
2016-09-21T15:39:39.000Z
|
2022-03-16T15:15:58.000Z
|
#encoding:utf-8
# Multireddit queried for posts to mirror (movie-discussion subreddits joined
# with '+', the Reddit multireddit separator).
subreddit = 'netflixbestof+bestofnetflix+movie_club+truefilm+shittymoviedetails+ijustwatched'
# Telegram channel the posts are forwarded to.
t_channel = '@r_movieclub'
def send_post(submission, r2t):
    """Forward a single Reddit submission to the channel.

    Delegates entirely to the r2t helper's simple sender and propagates
    its return value unchanged.
    """
    forward = r2t.send_simple
    return forward(submission)
| 23.444444
| 93
| 0.805687
| 25
| 211
| 6.6
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015625
| 0.090047
| 211
| 8
| 94
| 26.375
| 0.84375
| 0.066351
| 0
| 0
| 0
| 0
| 0.464286
| 0.403061
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0.25
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
72aa35db3cd57ae9bf4e1fbe35d6885f091d3e23
| 707
|
py
|
Python
|
src/deepproblog/networks_evolution_collector.py
|
Joshua-Schroijen/deepproblog
|
4ae56f1e860010b7857b29d5bd76fb1555d5e19d
|
[
"Apache-2.0"
] | null | null | null |
src/deepproblog/networks_evolution_collector.py
|
Joshua-Schroijen/deepproblog
|
4ae56f1e860010b7857b29d5bd76fb1555d5e19d
|
[
"Apache-2.0"
] | null | null | null |
src/deepproblog/networks_evolution_collector.py
|
Joshua-Schroijen/deepproblog
|
4ae56f1e860010b7857b29d5bd76fb1555d5e19d
|
[
"Apache-2.0"
] | null | null | null |
from typing import Collection
from abc import ABC, abstractmethod
from .network import Network
class NetworksEvolutionCollector(ABC):
    """Observer interface for recording network state during a training run.

    Each hook receives the current collection of networks at a fixed point
    of the run (before/after training, each epoch, and each iteration);
    implementations decide what to record. All six hooks are abstract, so a
    subclass must implement every one, even if only with an empty body.
    # NOTE(review): presumably invoked by the training loop — confirm at call sites.
    """

    @abstractmethod
    def collect_before_training(self, networks: Collection[Network]):
        """Hook invoked once, before the training run starts."""
        pass

    @abstractmethod
    def collect_before_epoch(self, networks: Collection[Network]):
        """Hook invoked at the start of each epoch."""
        pass

    @abstractmethod
    def collect_before_iteration(self, networks: Collection[Network]):
        """Hook invoked before each training iteration."""
        pass

    @abstractmethod
    def collect_after_iteration(self, networks: Collection[Network]):
        """Hook invoked after each training iteration."""
        pass

    @abstractmethod
    def collect_after_epoch(self, networks: Collection[Network]):
        """Hook invoked at the end of each epoch."""
        pass

    @abstractmethod
    def collect_after_training(self, networks: Collection[Network]):
        """Hook invoked once, after the training run completes."""
        pass
| 24.37931
| 68
| 0.770863
| 76
| 707
| 7.013158
| 0.25
| 0.19137
| 0.270169
| 0.326454
| 0.729831
| 0.729831
| 0.637899
| 0.637899
| 0.637899
| 0.266417
| 0
| 0
| 0.154173
| 707
| 29
| 69
| 24.37931
| 0.891304
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0.272727
| 0.136364
| 0
| 0.454545
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
72c79a5d6cabfec1fb8ca130bf22b96451af1da8
| 233
|
py
|
Python
|
tests/test_swaggerdefs.py
|
pjz/etcd3-py
|
b3d1d2c7eab436a7adeb1e6b36d357da08611f9c
|
[
"Apache-2.0"
] | 96
|
2018-03-20T03:42:25.000Z
|
2022-03-20T13:29:14.000Z
|
tests/test_swaggerdefs.py
|
pjz/etcd3-py
|
b3d1d2c7eab436a7adeb1e6b36d357da08611f9c
|
[
"Apache-2.0"
] | 148
|
2018-01-26T08:32:52.000Z
|
2022-03-25T11:40:42.000Z
|
tests/test_swaggerdefs.py
|
pjz/etcd3-py
|
b3d1d2c7eab436a7adeb1e6b36d357da08611f9c
|
[
"Apache-2.0"
] | 20
|
2018-03-30T04:40:25.000Z
|
2022-03-18T16:10:20.000Z
|
from etcd3.swaggerdefs import get_spec
def test_swagger_spec():
    """Smoke-test that a swagger spec is available for the default and each
    supported etcd patch version."""
    assert get_spec()
    for version in ('3.0.1', '3.1.1', '3.2.1', '3.3.1', '3.4.1'):
        assert get_spec(version)
| 21.181818
| 38
| 0.656652
| 43
| 233
| 3.348837
| 0.348837
| 0.340278
| 0.541667
| 0.486111
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084656
| 0.188841
| 233
| 10
| 39
| 23.3
| 0.677249
| 0
| 0
| 0
| 0
| 0
| 0.107296
| 0
| 0
| 0
| 0
| 0
| 0.75
| 1
| 0.125
| true
| 0
| 0.125
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f404b8188d9d7f6f8799f9922564387dfd11313a
| 157
|
py
|
Python
|
symarray/symarray/calculus/__init__.py
|
costrouc/uarray
|
c3c42147181a88265942ad5f9cf439467f746782
|
[
"BSD-3-Clause"
] | null | null | null |
symarray/symarray/calculus/__init__.py
|
costrouc/uarray
|
c3c42147181a88265942ad5f9cf439467f746782
|
[
"BSD-3-Clause"
] | null | null | null |
symarray/symarray/calculus/__init__.py
|
costrouc/uarray
|
c3c42147181a88265942ad5f9cf439467f746782
|
[
"BSD-3-Clause"
] | null | null | null |
from .base import BaseCalculus, BaseAtom, BaseTerms, BaseFactors, BaseComposite, BaseComponent
from .integers import Int, Integer
from .arrays import Array
| 31.4
| 94
| 0.821656
| 18
| 157
| 7.166667
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121019
| 157
| 4
| 95
| 39.25
| 0.934783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f40f61c512f1b30b01415ae9d3d158083b12acaf
| 90
|
py
|
Python
|
backend/noticias/admin.py
|
ES2-UFPI/404-portal
|
ac673d341a5a215441859fcd6184ff1e22a3fab4
|
[
"Apache-2.0"
] | 1
|
2019-03-21T19:53:55.000Z
|
2019-03-21T19:53:55.000Z
|
backend/noticias/admin.py
|
ES2-UFPI/404-portal
|
ac673d341a5a215441859fcd6184ff1e22a3fab4
|
[
"Apache-2.0"
] | 46
|
2019-03-28T14:34:19.000Z
|
2021-09-22T19:02:11.000Z
|
backend/noticias/admin.py
|
ES2-UFPI/404-portal
|
ac673d341a5a215441859fcd6184ff1e22a3fab4
|
[
"Apache-2.0"
] | 1
|
2022-02-17T16:51:04.000Z
|
2022-02-17T16:51:04.000Z
|
from django.contrib import admin

from .models import Noticia

# Expose Noticia in the Django admin with the default ModelAdmin options.
admin.site.register(Noticia)
| 22.5
| 32
| 0.833333
| 13
| 90
| 5.769231
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 90
| 4
| 33
| 22.5
| 0.925926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f4599c66957414c036fc60cac454425f44e6bbb2
| 172
|
py
|
Python
|
web-api/models/ConvertionConfigModel.py
|
nafis-sadik/voynich-converter
|
cbac552ade6cc45e206095e07599188d7edde70c
|
[
"MIT"
] | null | null | null |
web-api/models/ConvertionConfigModel.py
|
nafis-sadik/voynich-converter
|
cbac552ade6cc45e206095e07599188d7edde70c
|
[
"MIT"
] | null | null | null |
web-api/models/ConvertionConfigModel.py
|
nafis-sadik/voynich-converter
|
cbac552ade6cc45e206095e07599188d7edde70c
|
[
"MIT"
] | null | null | null |
from typing import Optional
from pydantic import BaseModel
class ConvertionConfigModel(BaseModel):
    """Request payload describing a single conversion job.

    NOTE(review): "Convertion" is a typo for "Conversion", but renaming the
    class would break importing callers — left as-is.
    """

    # Source URL of the content to convert.
    url: str
    # Optional clip start offset — presumably seconds; TODO confirm with consumers.
    start_time: Optional[int]
    # Optional clip end offset — presumably seconds; TODO confirm with consumers.
    end_time: Optional[int]
| 19.111111
| 39
| 0.767442
| 21
| 172
| 6.190476
| 0.666667
| 0.184615
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174419
| 172
| 9
| 40
| 19.111111
| 0.915493
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
be31ae9a1c4a4b46cbb92ee6c3e1699e5fd61530
| 68,420
|
py
|
Python
|
sdk/python/pulumi_google_native/compute/beta/instance.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/compute/beta/instance.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/compute/beta/instance.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['InstanceArgs', 'Instance']
@pulumi.input_type
class InstanceArgs:
def __init__(__self__, *,
advanced_machine_features: Optional[pulumi.Input['AdvancedMachineFeaturesArgs']] = None,
can_ip_forward: Optional[pulumi.Input[bool]] = None,
confidential_instance_config: Optional[pulumi.Input['ConfidentialInstanceConfigArgs']] = None,
deletion_protection: Optional[pulumi.Input[bool]] = None,
description: Optional[pulumi.Input[str]] = None,
disks: Optional[pulumi.Input[Sequence[pulumi.Input['AttachedDiskArgs']]]] = None,
display_device: Optional[pulumi.Input['DisplayDeviceArgs']] = None,
erase_windows_vss_signature: Optional[pulumi.Input[bool]] = None,
guest_accelerators: Optional[pulumi.Input[Sequence[pulumi.Input['AcceleratorConfigArgs']]]] = None,
hostname: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
machine_type: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input['MetadataArgs']] = None,
min_cpu_platform: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
network_interfaces: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkInterfaceArgs']]]] = None,
network_performance_config: Optional[pulumi.Input['NetworkPerformanceConfigArgs']] = None,
params: Optional[pulumi.Input['InstanceParamsArgs']] = None,
post_key_revocation_action_type: Optional[pulumi.Input['InstancePostKeyRevocationActionType']] = None,
private_ipv6_google_access: Optional[pulumi.Input['InstancePrivateIpv6GoogleAccess']] = None,
project: Optional[pulumi.Input[str]] = None,
request_id: Optional[pulumi.Input[str]] = None,
reservation_affinity: Optional[pulumi.Input['ReservationAffinityArgs']] = None,
resource_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
scheduling: Optional[pulumi.Input['SchedulingArgs']] = None,
service_accounts: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceAccountArgs']]]] = None,
shielded_instance_config: Optional[pulumi.Input['ShieldedInstanceConfigArgs']] = None,
shielded_instance_integrity_policy: Optional[pulumi.Input['ShieldedInstanceIntegrityPolicyArgs']] = None,
shielded_vm_config: Optional[pulumi.Input['ShieldedVmConfigArgs']] = None,
shielded_vm_integrity_policy: Optional[pulumi.Input['ShieldedVmIntegrityPolicyArgs']] = None,
source_instance_template: Optional[pulumi.Input[str]] = None,
source_machine_image: Optional[pulumi.Input[str]] = None,
source_machine_image_encryption_key: Optional[pulumi.Input['CustomerEncryptionKeyArgs']] = None,
tags: Optional[pulumi.Input['TagsArgs']] = None,
zone: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Instance resource.
:param pulumi.Input['AdvancedMachineFeaturesArgs'] advanced_machine_features: Controls for advanced machine-related behavior features.
:param pulumi.Input[bool] can_ip_forward: Allows this instance to send and receive packets with non-matching destination or source IPs. This is required if you plan to use this instance to forward routes. For more information, see Enabling IP Forwarding .
:param pulumi.Input[bool] deletion_protection: Whether the resource should be protected against deletion.
:param pulumi.Input[str] description: An optional description of this resource. Provide this property when you create the resource.
:param pulumi.Input[Sequence[pulumi.Input['AttachedDiskArgs']]] disks: Array of disks associated with this instance. Persistent disks must be created before you can assign them.
:param pulumi.Input['DisplayDeviceArgs'] display_device: Enables display device for the instance.
:param pulumi.Input[bool] erase_windows_vss_signature: Specifies whether the disks restored from source snapshots or source machine image should erase Windows specific VSS signature.
:param pulumi.Input[Sequence[pulumi.Input['AcceleratorConfigArgs']]] guest_accelerators: A list of the type and count of accelerator cards attached to the instance.
:param pulumi.Input[str] hostname: Specifies the hostname of the instance. The specified hostname must be RFC1035 compliant. If hostname is not specified, the default hostname is [INSTANCE_NAME].c.[PROJECT_ID].internal when using the global DNS, and [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using zonal DNS.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to apply to this instance. These can be later modified by the setLabels method.
:param pulumi.Input[str] machine_type: Full or partial URL of the machine type resource to use for this instance, in the format: zones/zone/machineTypes/machine-type. This is provided by the client when the instance is created. For example, the following is a valid partial url to a predefined machine type: zones/us-central1-f/machineTypes/n1-standard-1 To create a custom machine type, provide a URL to a machine type in the following format, where CPUS is 1 or an even number up to 32 (2, 4, 6, ... 24, etc), and MEMORY is the total memory for this instance. Memory must be a multiple of 256 MB and must be supplied in MB (e.g. 5 GB of memory is 5120 MB): zones/zone/machineTypes/custom-CPUS-MEMORY For example: zones/us-central1-f/machineTypes/custom-4-5120 For a full list of restrictions, read the Specifications for custom machine types.
:param pulumi.Input['MetadataArgs'] metadata: The metadata key/value pairs assigned to this instance. This includes custom metadata and predefined keys.
:param pulumi.Input[str] min_cpu_platform: Specifies a minimum CPU platform for the VM instance. Applicable values are the friendly names of CPU platforms, such as minCpuPlatform: "Intel Haswell" or minCpuPlatform: "Intel Sandy Bridge".
:param pulumi.Input[str] name: The name of the resource, provided by the client when initially creating the resource. The resource name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
:param pulumi.Input[Sequence[pulumi.Input['NetworkInterfaceArgs']]] network_interfaces: An array of network configurations for this instance. These specify how interfaces are configured to interact with other network services, such as connecting to the internet. Multiple interfaces are supported per instance.
:param pulumi.Input['InstanceParamsArgs'] params: Input only. [Input Only] Additional params passed with the request, but not persisted as part of resource payload.
:param pulumi.Input['InstancePostKeyRevocationActionType'] post_key_revocation_action_type: PostKeyRevocationActionType of the instance.
:param pulumi.Input['InstancePrivateIpv6GoogleAccess'] private_ipv6_google_access: The private IPv6 google access type for the VM. If not specified, use INHERIT_FROM_SUBNETWORK as default.
:param pulumi.Input['ReservationAffinityArgs'] reservation_affinity: Specifies the reservations that this instance can consume from.
:param pulumi.Input[Sequence[pulumi.Input[str]]] resource_policies: Resource policies applied to this instance.
:param pulumi.Input['SchedulingArgs'] scheduling: Sets the scheduling options for this instance.
:param pulumi.Input[Sequence[pulumi.Input['ServiceAccountArgs']]] service_accounts: A list of service accounts, with their specified scopes, authorized for this instance. Only one service account per VM instance is supported. Service accounts generate access tokens that can be accessed through the metadata server and used to authenticate applications on the instance. See Service Accounts for more information.
:param pulumi.Input['ShieldedVmConfigArgs'] shielded_vm_config: Deprecating, please use shielded_instance_config.
:param pulumi.Input['ShieldedVmIntegrityPolicyArgs'] shielded_vm_integrity_policy: Deprecating, please use shielded_instance_integrity_policy.
:param pulumi.Input[str] source_machine_image: Source machine image
:param pulumi.Input['CustomerEncryptionKeyArgs'] source_machine_image_encryption_key: Source machine image encryption key when creating an instance from a machine image.
:param pulumi.Input['TagsArgs'] tags: Tags to apply to this instance. Tags are used to identify valid sources or targets for network firewalls and are specified by the client during instance creation. The tags can be later modified by the setTags method. Each tag within the list must comply with RFC1035. Multiple tags can be specified via the 'tags.items' field.
"""
if advanced_machine_features is not None:
pulumi.set(__self__, "advanced_machine_features", advanced_machine_features)
if can_ip_forward is not None:
pulumi.set(__self__, "can_ip_forward", can_ip_forward)
if confidential_instance_config is not None:
pulumi.set(__self__, "confidential_instance_config", confidential_instance_config)
if deletion_protection is not None:
pulumi.set(__self__, "deletion_protection", deletion_protection)
if description is not None:
pulumi.set(__self__, "description", description)
if disks is not None:
pulumi.set(__self__, "disks", disks)
if display_device is not None:
pulumi.set(__self__, "display_device", display_device)
if erase_windows_vss_signature is not None:
pulumi.set(__self__, "erase_windows_vss_signature", erase_windows_vss_signature)
if guest_accelerators is not None:
pulumi.set(__self__, "guest_accelerators", guest_accelerators)
if hostname is not None:
pulumi.set(__self__, "hostname", hostname)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if machine_type is not None:
pulumi.set(__self__, "machine_type", machine_type)
if metadata is not None:
pulumi.set(__self__, "metadata", metadata)
if min_cpu_platform is not None:
pulumi.set(__self__, "min_cpu_platform", min_cpu_platform)
if name is not None:
pulumi.set(__self__, "name", name)
if network_interfaces is not None:
pulumi.set(__self__, "network_interfaces", network_interfaces)
if network_performance_config is not None:
pulumi.set(__self__, "network_performance_config", network_performance_config)
if params is not None:
pulumi.set(__self__, "params", params)
if post_key_revocation_action_type is not None:
pulumi.set(__self__, "post_key_revocation_action_type", post_key_revocation_action_type)
if private_ipv6_google_access is not None:
pulumi.set(__self__, "private_ipv6_google_access", private_ipv6_google_access)
if project is not None:
pulumi.set(__self__, "project", project)
if request_id is not None:
pulumi.set(__self__, "request_id", request_id)
if reservation_affinity is not None:
pulumi.set(__self__, "reservation_affinity", reservation_affinity)
if resource_policies is not None:
pulumi.set(__self__, "resource_policies", resource_policies)
if scheduling is not None:
pulumi.set(__self__, "scheduling", scheduling)
if service_accounts is not None:
pulumi.set(__self__, "service_accounts", service_accounts)
if shielded_instance_config is not None:
pulumi.set(__self__, "shielded_instance_config", shielded_instance_config)
if shielded_instance_integrity_policy is not None:
pulumi.set(__self__, "shielded_instance_integrity_policy", shielded_instance_integrity_policy)
if shielded_vm_config is not None:
pulumi.set(__self__, "shielded_vm_config", shielded_vm_config)
if shielded_vm_integrity_policy is not None:
pulumi.set(__self__, "shielded_vm_integrity_policy", shielded_vm_integrity_policy)
if source_instance_template is not None:
pulumi.set(__self__, "source_instance_template", source_instance_template)
if source_machine_image is not None:
pulumi.set(__self__, "source_machine_image", source_machine_image)
if source_machine_image_encryption_key is not None:
pulumi.set(__self__, "source_machine_image_encryption_key", source_machine_image_encryption_key)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if zone is not None:
pulumi.set(__self__, "zone", zone)
@property
@pulumi.getter(name="advancedMachineFeatures")
def advanced_machine_features(self) -> Optional[pulumi.Input['AdvancedMachineFeaturesArgs']]:
"""
Controls for advanced machine-related behavior features.
"""
return pulumi.get(self, "advanced_machine_features")
@advanced_machine_features.setter
def advanced_machine_features(self, value: Optional[pulumi.Input['AdvancedMachineFeaturesArgs']]):
pulumi.set(self, "advanced_machine_features", value)
@property
@pulumi.getter(name="canIpForward")
def can_ip_forward(self) -> Optional[pulumi.Input[bool]]:
"""
Allows this instance to send and receive packets with non-matching destination or source IPs. This is required if you plan to use this instance to forward routes. For more information, see Enabling IP Forwarding .
"""
return pulumi.get(self, "can_ip_forward")
@can_ip_forward.setter
def can_ip_forward(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "can_ip_forward", value)
@property
@pulumi.getter(name="confidentialInstanceConfig")
def confidential_instance_config(self) -> Optional[pulumi.Input['ConfidentialInstanceConfigArgs']]:
return pulumi.get(self, "confidential_instance_config")
@confidential_instance_config.setter
def confidential_instance_config(self, value: Optional[pulumi.Input['ConfidentialInstanceConfigArgs']]):
pulumi.set(self, "confidential_instance_config", value)
@property
@pulumi.getter(name="deletionProtection")
def deletion_protection(self) -> Optional[pulumi.Input[bool]]:
"""
Whether the resource should be protected against deletion.
"""
return pulumi.get(self, "deletion_protection")
@deletion_protection.setter
def deletion_protection(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "deletion_protection", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
    """
    An optional description of this resource. Provide this property when you create the resource.
    """
    return pulumi.get(self, "description")

@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
    """Setter for ``description``."""
    pulumi.set(self, "description", value)
@property
@pulumi.getter
def disks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AttachedDiskArgs']]]]:
    """
    Array of disks associated with this instance. Persistent disks must be created before you can assign them.
    """
    return pulumi.get(self, "disks")

@disks.setter
def disks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AttachedDiskArgs']]]]):
    """Setter for ``disks``."""
    pulumi.set(self, "disks", value)
@property
@pulumi.getter(name="displayDevice")
def display_device(self) -> Optional[pulumi.Input['DisplayDeviceArgs']]:
    """
    Enables display device for the instance.
    """
    return pulumi.get(self, "display_device")

@display_device.setter
def display_device(self, value: Optional[pulumi.Input['DisplayDeviceArgs']]):
    """Setter for ``display_device``."""
    pulumi.set(self, "display_device", value)
@property
@pulumi.getter(name="eraseWindowsVssSignature")
def erase_windows_vss_signature(self) -> Optional[pulumi.Input[bool]]:
    """
    Specifies whether the disks restored from source snapshots or source machine image should erase Windows specific VSS signature.
    """
    return pulumi.get(self, "erase_windows_vss_signature")

@erase_windows_vss_signature.setter
def erase_windows_vss_signature(self, value: Optional[pulumi.Input[bool]]):
    """Setter for ``erase_windows_vss_signature``."""
    pulumi.set(self, "erase_windows_vss_signature", value)
@property
@pulumi.getter(name="guestAccelerators")
def guest_accelerators(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AcceleratorConfigArgs']]]]:
    """
    A list of the type and count of accelerator cards attached to the instance.
    """
    return pulumi.get(self, "guest_accelerators")

@guest_accelerators.setter
def guest_accelerators(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AcceleratorConfigArgs']]]]):
    """Setter for ``guest_accelerators``."""
    pulumi.set(self, "guest_accelerators", value)
@property
@pulumi.getter
def hostname(self) -> Optional[pulumi.Input[str]]:
    """
    Specifies the hostname of the instance. The specified hostname must be RFC1035 compliant. If hostname is not specified, the default hostname is [INSTANCE_NAME].c.[PROJECT_ID].internal when using the global DNS, and [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using zonal DNS.
    """
    return pulumi.get(self, "hostname")

@hostname.setter
def hostname(self, value: Optional[pulumi.Input[str]]):
    """Setter for ``hostname``."""
    pulumi.set(self, "hostname", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
    """
    Labels to apply to this instance. These can be later modified by the setLabels method.
    """
    return pulumi.get(self, "labels")

@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
    """Setter for ``labels``."""
    pulumi.set(self, "labels", value)
@property
@pulumi.getter(name="machineType")
def machine_type(self) -> Optional[pulumi.Input[str]]:
    """
    Full or partial URL of the machine type resource to use for this instance, in the format: zones/zone/machineTypes/machine-type. This is provided by the client when the instance is created. For example, the following is a valid partial url to a predefined machine type: zones/us-central1-f/machineTypes/n1-standard-1 To create a custom machine type, provide a URL to a machine type in the following format, where CPUS is 1 or an even number up to 32 (2, 4, 6, ... 24, etc), and MEMORY is the total memory for this instance. Memory must be a multiple of 256 MB and must be supplied in MB (e.g. 5 GB of memory is 5120 MB): zones/zone/machineTypes/custom-CPUS-MEMORY For example: zones/us-central1-f/machineTypes/custom-4-5120 For a full list of restrictions, read the Specifications for custom machine types.
    """
    return pulumi.get(self, "machine_type")

@machine_type.setter
def machine_type(self, value: Optional[pulumi.Input[str]]):
    """Setter for ``machine_type``."""
    pulumi.set(self, "machine_type", value)
@property
@pulumi.getter
def metadata(self) -> Optional[pulumi.Input['MetadataArgs']]:
    """
    The metadata key/value pairs assigned to this instance. This includes custom metadata and predefined keys.
    """
    return pulumi.get(self, "metadata")

@metadata.setter
def metadata(self, value: Optional[pulumi.Input['MetadataArgs']]):
    """Setter for ``metadata``."""
    pulumi.set(self, "metadata", value)
@property
@pulumi.getter(name="minCpuPlatform")
def min_cpu_platform(self) -> Optional[pulumi.Input[str]]:
    """
    Specifies a minimum CPU platform for the VM instance. Applicable values are the friendly names of CPU platforms, such as minCpuPlatform: "Intel Haswell" or minCpuPlatform: "Intel Sandy Bridge".
    """
    return pulumi.get(self, "min_cpu_platform")

@min_cpu_platform.setter
def min_cpu_platform(self, value: Optional[pulumi.Input[str]]):
    """Setter for ``min_cpu_platform``."""
    pulumi.set(self, "min_cpu_platform", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
    """
    The name of the resource, provided by the client when initially creating the resource. The resource name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
    """
    return pulumi.get(self, "name")

@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
    """Setter for ``name``."""
    pulumi.set(self, "name", value)
@property
@pulumi.getter(name="networkInterfaces")
def network_interfaces(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['NetworkInterfaceArgs']]]]:
    """
    An array of network configurations for this instance. These specify how interfaces are configured to interact with other network services, such as connecting to the internet. Multiple interfaces are supported per instance.
    """
    return pulumi.get(self, "network_interfaces")

@network_interfaces.setter
def network_interfaces(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['NetworkInterfaceArgs']]]]):
    """Setter for ``network_interfaces``."""
    pulumi.set(self, "network_interfaces", value)
@property
@pulumi.getter(name="networkPerformanceConfig")
def network_performance_config(self) -> Optional[pulumi.Input['NetworkPerformanceConfigArgs']]:
    """Network-performance configuration (no description provided by the API schema)."""
    return pulumi.get(self, "network_performance_config")

@network_performance_config.setter
def network_performance_config(self, value: Optional[pulumi.Input['NetworkPerformanceConfigArgs']]):
    """Setter for ``network_performance_config``."""
    pulumi.set(self, "network_performance_config", value)
@property
@pulumi.getter
def params(self) -> Optional[pulumi.Input['InstanceParamsArgs']]:
    """
    Input only. [Input Only] Additional params passed with the request, but not persisted as part of resource payload.
    """
    return pulumi.get(self, "params")

@params.setter
def params(self, value: Optional[pulumi.Input['InstanceParamsArgs']]):
    """Setter for ``params``."""
    pulumi.set(self, "params", value)
@property
@pulumi.getter(name="postKeyRevocationActionType")
def post_key_revocation_action_type(self) -> Optional[pulumi.Input['InstancePostKeyRevocationActionType']]:
    """
    PostKeyRevocationActionType of the instance.
    """
    return pulumi.get(self, "post_key_revocation_action_type")

@post_key_revocation_action_type.setter
def post_key_revocation_action_type(self, value: Optional[pulumi.Input['InstancePostKeyRevocationActionType']]):
    """Setter for ``post_key_revocation_action_type``."""
    pulumi.set(self, "post_key_revocation_action_type", value)
@property
@pulumi.getter(name="privateIpv6GoogleAccess")
def private_ipv6_google_access(self) -> Optional[pulumi.Input['InstancePrivateIpv6GoogleAccess']]:
    """
    The private IPv6 google access type for the VM. If not specified, use INHERIT_FROM_SUBNETWORK as default.
    """
    return pulumi.get(self, "private_ipv6_google_access")

@private_ipv6_google_access.setter
def private_ipv6_google_access(self, value: Optional[pulumi.Input['InstancePrivateIpv6GoogleAccess']]):
    """Setter for ``private_ipv6_google_access``."""
    pulumi.set(self, "private_ipv6_google_access", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
    """Target project for the instance (no description provided by the API schema)."""
    return pulumi.get(self, "project")

@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
    """Setter for ``project``."""
    pulumi.set(self, "project", value)
@property
@pulumi.getter(name="requestId")
def request_id(self) -> Optional[pulumi.Input[str]]:
    """Request ID (no description provided by the API schema)."""
    return pulumi.get(self, "request_id")

@request_id.setter
def request_id(self, value: Optional[pulumi.Input[str]]):
    """Setter for ``request_id``."""
    pulumi.set(self, "request_id", value)
@property
@pulumi.getter(name="reservationAffinity")
def reservation_affinity(self) -> Optional[pulumi.Input['ReservationAffinityArgs']]:
    """
    Specifies the reservations that this instance can consume from.
    """
    return pulumi.get(self, "reservation_affinity")

@reservation_affinity.setter
def reservation_affinity(self, value: Optional[pulumi.Input['ReservationAffinityArgs']]):
    """Setter for ``reservation_affinity``."""
    pulumi.set(self, "reservation_affinity", value)
@property
@pulumi.getter(name="resourcePolicies")
def resource_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    Resource policies applied to this instance.
    """
    return pulumi.get(self, "resource_policies")

@resource_policies.setter
def resource_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    """Setter for ``resource_policies``."""
    pulumi.set(self, "resource_policies", value)
@property
@pulumi.getter
def scheduling(self) -> Optional[pulumi.Input['SchedulingArgs']]:
    """
    Sets the scheduling options for this instance.
    """
    return pulumi.get(self, "scheduling")

@scheduling.setter
def scheduling(self, value: Optional[pulumi.Input['SchedulingArgs']]):
    """Setter for ``scheduling``."""
    pulumi.set(self, "scheduling", value)
@property
@pulumi.getter(name="serviceAccounts")
def service_accounts(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ServiceAccountArgs']]]]:
    """
    A list of service accounts, with their specified scopes, authorized for this instance. Only one service account per VM instance is supported. Service accounts generate access tokens that can be accessed through the metadata server and used to authenticate applications on the instance. See Service Accounts for more information.
    """
    return pulumi.get(self, "service_accounts")

@service_accounts.setter
def service_accounts(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ServiceAccountArgs']]]]):
    """Setter for ``service_accounts``."""
    pulumi.set(self, "service_accounts", value)
@property
@pulumi.getter(name="shieldedInstanceConfig")
def shielded_instance_config(self) -> Optional[pulumi.Input['ShieldedInstanceConfigArgs']]:
    """Shielded-instance configuration (no description provided by the API schema)."""
    return pulumi.get(self, "shielded_instance_config")

@shielded_instance_config.setter
def shielded_instance_config(self, value: Optional[pulumi.Input['ShieldedInstanceConfigArgs']]):
    """Setter for ``shielded_instance_config``."""
    pulumi.set(self, "shielded_instance_config", value)
@property
@pulumi.getter(name="shieldedInstanceIntegrityPolicy")
def shielded_instance_integrity_policy(self) -> Optional[pulumi.Input['ShieldedInstanceIntegrityPolicyArgs']]:
    """Shielded-instance integrity policy (no description provided by the API schema)."""
    return pulumi.get(self, "shielded_instance_integrity_policy")

@shielded_instance_integrity_policy.setter
def shielded_instance_integrity_policy(self, value: Optional[pulumi.Input['ShieldedInstanceIntegrityPolicyArgs']]):
    """Setter for ``shielded_instance_integrity_policy``."""
    pulumi.set(self, "shielded_instance_integrity_policy", value)
@property
@pulumi.getter(name="shieldedVmConfig")
def shielded_vm_config(self) -> Optional[pulumi.Input['ShieldedVmConfigArgs']]:
    """
    Deprecating, please use shielded_instance_config.
    """
    return pulumi.get(self, "shielded_vm_config")

@shielded_vm_config.setter
def shielded_vm_config(self, value: Optional[pulumi.Input['ShieldedVmConfigArgs']]):
    """Setter for ``shielded_vm_config`` (deprecated in favor of ``shielded_instance_config``)."""
    pulumi.set(self, "shielded_vm_config", value)
@property
@pulumi.getter(name="shieldedVmIntegrityPolicy")
def shielded_vm_integrity_policy(self) -> Optional[pulumi.Input['ShieldedVmIntegrityPolicyArgs']]:
    """
    Deprecating, please use shielded_instance_integrity_policy.
    """
    return pulumi.get(self, "shielded_vm_integrity_policy")

@shielded_vm_integrity_policy.setter
def shielded_vm_integrity_policy(self, value: Optional[pulumi.Input['ShieldedVmIntegrityPolicyArgs']]):
    """Setter for ``shielded_vm_integrity_policy`` (deprecated in favor of ``shielded_instance_integrity_policy``)."""
    pulumi.set(self, "shielded_vm_integrity_policy", value)
@property
@pulumi.getter(name="sourceInstanceTemplate")
def source_instance_template(self) -> Optional[pulumi.Input[str]]:
    """Source instance template (no description provided by the API schema)."""
    return pulumi.get(self, "source_instance_template")

@source_instance_template.setter
def source_instance_template(self, value: Optional[pulumi.Input[str]]):
    """Setter for ``source_instance_template``."""
    pulumi.set(self, "source_instance_template", value)
@property
@pulumi.getter(name="sourceMachineImage")
def source_machine_image(self) -> Optional[pulumi.Input[str]]:
    """
    Source machine image
    """
    return pulumi.get(self, "source_machine_image")

@source_machine_image.setter
def source_machine_image(self, value: Optional[pulumi.Input[str]]):
    """Setter for ``source_machine_image``."""
    pulumi.set(self, "source_machine_image", value)
@property
@pulumi.getter(name="sourceMachineImageEncryptionKey")
def source_machine_image_encryption_key(self) -> Optional[pulumi.Input['CustomerEncryptionKeyArgs']]:
    """
    Source machine image encryption key when creating an instance from a machine image.
    """
    return pulumi.get(self, "source_machine_image_encryption_key")

@source_machine_image_encryption_key.setter
def source_machine_image_encryption_key(self, value: Optional[pulumi.Input['CustomerEncryptionKeyArgs']]):
    """Setter for ``source_machine_image_encryption_key``."""
    pulumi.set(self, "source_machine_image_encryption_key", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input['TagsArgs']]:
    """
    Tags to apply to this instance. Tags are used to identify valid sources or targets for network firewalls and are specified by the client during instance creation. The tags can be later modified by the setTags method. Each tag within the list must comply with RFC1035. Multiple tags can be specified via the 'tags.items' field.
    """
    return pulumi.get(self, "tags")

@tags.setter
def tags(self, value: Optional[pulumi.Input['TagsArgs']]):
    """Setter for ``tags``."""
    pulumi.set(self, "tags", value)
@property
@pulumi.getter
def zone(self) -> Optional[pulumi.Input[str]]:
    """Target zone for the instance (no description provided by the API schema)."""
    return pulumi.get(self, "zone")

@zone.setter
def zone(self, value: Optional[pulumi.Input[str]]):
    """Setter for ``zone``."""
    pulumi.set(self, "zone", value)
class Instance(pulumi.CustomResource):
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             advanced_machine_features: Optional[pulumi.Input[pulumi.InputType['AdvancedMachineFeaturesArgs']]] = None,
             can_ip_forward: Optional[pulumi.Input[bool]] = None,
             confidential_instance_config: Optional[pulumi.Input[pulumi.InputType['ConfidentialInstanceConfigArgs']]] = None,
             deletion_protection: Optional[pulumi.Input[bool]] = None,
             description: Optional[pulumi.Input[str]] = None,
             disks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttachedDiskArgs']]]]] = None,
             display_device: Optional[pulumi.Input[pulumi.InputType['DisplayDeviceArgs']]] = None,
             erase_windows_vss_signature: Optional[pulumi.Input[bool]] = None,
             guest_accelerators: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AcceleratorConfigArgs']]]]] = None,
             hostname: Optional[pulumi.Input[str]] = None,
             labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
             machine_type: Optional[pulumi.Input[str]] = None,
             metadata: Optional[pulumi.Input[pulumi.InputType['MetadataArgs']]] = None,
             min_cpu_platform: Optional[pulumi.Input[str]] = None,
             name: Optional[pulumi.Input[str]] = None,
             network_interfaces: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkInterfaceArgs']]]]] = None,
             network_performance_config: Optional[pulumi.Input[pulumi.InputType['NetworkPerformanceConfigArgs']]] = None,
             params: Optional[pulumi.Input[pulumi.InputType['InstanceParamsArgs']]] = None,
             post_key_revocation_action_type: Optional[pulumi.Input['InstancePostKeyRevocationActionType']] = None,
             private_ipv6_google_access: Optional[pulumi.Input['InstancePrivateIpv6GoogleAccess']] = None,
             project: Optional[pulumi.Input[str]] = None,
             request_id: Optional[pulumi.Input[str]] = None,
             reservation_affinity: Optional[pulumi.Input[pulumi.InputType['ReservationAffinityArgs']]] = None,
             resource_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             scheduling: Optional[pulumi.Input[pulumi.InputType['SchedulingArgs']]] = None,
             service_accounts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceAccountArgs']]]]] = None,
             shielded_instance_config: Optional[pulumi.Input[pulumi.InputType['ShieldedInstanceConfigArgs']]] = None,
             shielded_instance_integrity_policy: Optional[pulumi.Input[pulumi.InputType['ShieldedInstanceIntegrityPolicyArgs']]] = None,
             shielded_vm_config: Optional[pulumi.Input[pulumi.InputType['ShieldedVmConfigArgs']]] = None,
             shielded_vm_integrity_policy: Optional[pulumi.Input[pulumi.InputType['ShieldedVmIntegrityPolicyArgs']]] = None,
             source_instance_template: Optional[pulumi.Input[str]] = None,
             source_machine_image: Optional[pulumi.Input[str]] = None,
             source_machine_image_encryption_key: Optional[pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']]] = None,
             tags: Optional[pulumi.Input[pulumi.InputType['TagsArgs']]] = None,
             zone: Optional[pulumi.Input[str]] = None,
             __props__=None):
    """
    Creates an instance resource in the specified project using the data included in the request.

    Keyword-argument overload; the sibling overload accepts a single ``InstanceArgs`` object.
    Parameters without a description below have none in the underlying API schema.

    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[pulumi.InputType['AdvancedMachineFeaturesArgs']] advanced_machine_features: Controls for advanced machine-related behavior features.
    :param pulumi.Input[bool] can_ip_forward: Allows this instance to send and receive packets with non-matching destination or source IPs. This is required if you plan to use this instance to forward routes. For more information, see Enabling IP Forwarding .
    :param pulumi.Input[pulumi.InputType['ConfidentialInstanceConfigArgs']] confidential_instance_config: (No description provided.)
    :param pulumi.Input[bool] deletion_protection: Whether the resource should be protected against deletion.
    :param pulumi.Input[str] description: An optional description of this resource. Provide this property when you create the resource.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttachedDiskArgs']]]] disks: Array of disks associated with this instance. Persistent disks must be created before you can assign them.
    :param pulumi.Input[pulumi.InputType['DisplayDeviceArgs']] display_device: Enables display device for the instance.
    :param pulumi.Input[bool] erase_windows_vss_signature: Specifies whether the disks restored from source snapshots or source machine image should erase Windows specific VSS signature.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AcceleratorConfigArgs']]]] guest_accelerators: A list of the type and count of accelerator cards attached to the instance.
    :param pulumi.Input[str] hostname: Specifies the hostname of the instance. The specified hostname must be RFC1035 compliant. If hostname is not specified, the default hostname is [INSTANCE_NAME].c.[PROJECT_ID].internal when using the global DNS, and [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using zonal DNS.
    :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Labels to apply to this instance. These can be later modified by the setLabels method.
    :param pulumi.Input[str] machine_type: Full or partial URL of the machine type resource to use for this instance, in the format: zones/zone/machineTypes/machine-type. This is provided by the client when the instance is created. For example, the following is a valid partial url to a predefined machine type: zones/us-central1-f/machineTypes/n1-standard-1 To create a custom machine type, provide a URL to a machine type in the following format, where CPUS is 1 or an even number up to 32 (2, 4, 6, ... 24, etc), and MEMORY is the total memory for this instance. Memory must be a multiple of 256 MB and must be supplied in MB (e.g. 5 GB of memory is 5120 MB): zones/zone/machineTypes/custom-CPUS-MEMORY For example: zones/us-central1-f/machineTypes/custom-4-5120 For a full list of restrictions, read the Specifications for custom machine types.
    :param pulumi.Input[pulumi.InputType['MetadataArgs']] metadata: The metadata key/value pairs assigned to this instance. This includes custom metadata and predefined keys.
    :param pulumi.Input[str] min_cpu_platform: Specifies a minimum CPU platform for the VM instance. Applicable values are the friendly names of CPU platforms, such as minCpuPlatform: "Intel Haswell" or minCpuPlatform: "Intel Sandy Bridge".
    :param pulumi.Input[str] name: The name of the resource, provided by the client when initially creating the resource. The resource name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkInterfaceArgs']]]] network_interfaces: An array of network configurations for this instance. These specify how interfaces are configured to interact with other network services, such as connecting to the internet. Multiple interfaces are supported per instance.
    :param pulumi.Input[pulumi.InputType['NetworkPerformanceConfigArgs']] network_performance_config: (No description provided.)
    :param pulumi.Input[pulumi.InputType['InstanceParamsArgs']] params: Input only. [Input Only] Additional params passed with the request, but not persisted as part of resource payload.
    :param pulumi.Input['InstancePostKeyRevocationActionType'] post_key_revocation_action_type: PostKeyRevocationActionType of the instance.
    :param pulumi.Input['InstancePrivateIpv6GoogleAccess'] private_ipv6_google_access: The private IPv6 google access type for the VM. If not specified, use INHERIT_FROM_SUBNETWORK as default.
    :param pulumi.Input[str] project: (No description provided.)
    :param pulumi.Input[str] request_id: (No description provided.)
    :param pulumi.Input[pulumi.InputType['ReservationAffinityArgs']] reservation_affinity: Specifies the reservations that this instance can consume from.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] resource_policies: Resource policies applied to this instance.
    :param pulumi.Input[pulumi.InputType['SchedulingArgs']] scheduling: Sets the scheduling options for this instance.
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceAccountArgs']]]] service_accounts: A list of service accounts, with their specified scopes, authorized for this instance. Only one service account per VM instance is supported. Service accounts generate access tokens that can be accessed through the metadata server and used to authenticate applications on the instance. See Service Accounts for more information.
    :param pulumi.Input[pulumi.InputType['ShieldedInstanceConfigArgs']] shielded_instance_config: (No description provided.)
    :param pulumi.Input[pulumi.InputType['ShieldedInstanceIntegrityPolicyArgs']] shielded_instance_integrity_policy: (No description provided.)
    :param pulumi.Input[pulumi.InputType['ShieldedVmConfigArgs']] shielded_vm_config: Deprecating, please use shielded_instance_config.
    :param pulumi.Input[pulumi.InputType['ShieldedVmIntegrityPolicyArgs']] shielded_vm_integrity_policy: Deprecating, please use shielded_instance_integrity_policy.
    :param pulumi.Input[str] source_instance_template: (No description provided.)
    :param pulumi.Input[str] source_machine_image: Source machine image
    :param pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']] source_machine_image_encryption_key: Source machine image encryption key when creating an instance from a machine image.
    :param pulumi.Input[pulumi.InputType['TagsArgs']] tags: Tags to apply to this instance. Tags are used to identify valid sources or targets for network firewalls and are specified by the client during instance creation. The tags can be later modified by the setTags method. Each tag within the list must comply with RFC1035. Multiple tags can be specified via the 'tags.items' field.
    :param pulumi.Input[str] zone: (No description provided.)
    """
    ...
@overload
def __init__(__self__,
             resource_name: str,
             args: Optional[InstanceArgs] = None,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Creates an instance resource in the specified project using the data included in the request.

    Args-object overload; the runtime dispatcher picks between this and the
    keyword-argument overload via ``_utilities.get_resource_args_opts``.

    :param str resource_name: The name of the resource.
    :param InstanceArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    # Runtime dispatcher for the two @overload signatures above: try to
    # interpret the call as (resource_name, args=InstanceArgs, opts=...).
    resource_args, opts = _utilities.get_resource_args_opts(InstanceArgs, pulumi.ResourceOptions, *args, **kwargs)
    if resource_args is not None:
        # Args-object form: expand the InstanceArgs fields into keyword args.
        __self__._internal_init(resource_name, opts, **resource_args.__dict__)
    else:
        # Keyword-argument form: forward the original call unchanged.
        __self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   advanced_machine_features: Optional[pulumi.Input[pulumi.InputType['AdvancedMachineFeaturesArgs']]] = None,
                   can_ip_forward: Optional[pulumi.Input[bool]] = None,
                   confidential_instance_config: Optional[pulumi.Input[pulumi.InputType['ConfidentialInstanceConfigArgs']]] = None,
                   deletion_protection: Optional[pulumi.Input[bool]] = None,
                   description: Optional[pulumi.Input[str]] = None,
                   disks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttachedDiskArgs']]]]] = None,
                   display_device: Optional[pulumi.Input[pulumi.InputType['DisplayDeviceArgs']]] = None,
                   erase_windows_vss_signature: Optional[pulumi.Input[bool]] = None,
                   guest_accelerators: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AcceleratorConfigArgs']]]]] = None,
                   hostname: Optional[pulumi.Input[str]] = None,
                   labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                   machine_type: Optional[pulumi.Input[str]] = None,
                   metadata: Optional[pulumi.Input[pulumi.InputType['MetadataArgs']]] = None,
                   min_cpu_platform: Optional[pulumi.Input[str]] = None,
                   name: Optional[pulumi.Input[str]] = None,
                   network_interfaces: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['NetworkInterfaceArgs']]]]] = None,
                   network_performance_config: Optional[pulumi.Input[pulumi.InputType['NetworkPerformanceConfigArgs']]] = None,
                   params: Optional[pulumi.Input[pulumi.InputType['InstanceParamsArgs']]] = None,
                   post_key_revocation_action_type: Optional[pulumi.Input['InstancePostKeyRevocationActionType']] = None,
                   private_ipv6_google_access: Optional[pulumi.Input['InstancePrivateIpv6GoogleAccess']] = None,
                   project: Optional[pulumi.Input[str]] = None,
                   request_id: Optional[pulumi.Input[str]] = None,
                   reservation_affinity: Optional[pulumi.Input[pulumi.InputType['ReservationAffinityArgs']]] = None,
                   resource_policies: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                   scheduling: Optional[pulumi.Input[pulumi.InputType['SchedulingArgs']]] = None,
                   service_accounts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceAccountArgs']]]]] = None,
                   shielded_instance_config: Optional[pulumi.Input[pulumi.InputType['ShieldedInstanceConfigArgs']]] = None,
                   shielded_instance_integrity_policy: Optional[pulumi.Input[pulumi.InputType['ShieldedInstanceIntegrityPolicyArgs']]] = None,
                   shielded_vm_config: Optional[pulumi.Input[pulumi.InputType['ShieldedVmConfigArgs']]] = None,
                   shielded_vm_integrity_policy: Optional[pulumi.Input[pulumi.InputType['ShieldedVmIntegrityPolicyArgs']]] = None,
                   source_instance_template: Optional[pulumi.Input[str]] = None,
                   source_machine_image: Optional[pulumi.Input[str]] = None,
                   source_machine_image_encryption_key: Optional[pulumi.Input[pulumi.InputType['CustomerEncryptionKeyArgs']]] = None,
                   tags: Optional[pulumi.Input[pulumi.InputType['TagsArgs']]] = None,
                   zone: Optional[pulumi.Input[str]] = None,
                   __props__=None):
    """
    Shared implementation behind both ``__init__`` overloads: validate the
    resource options, build the property bag, and register the resource
    with the Pulumi engine.
    """
    # Normalize and validate resource options.
    if opts is None:
        opts = pulumi.ResourceOptions()
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    # No opts.id means we are creating a new resource (as opposed to
    # adopting an existing one), so a caller-supplied __props__ is invalid.
    if opts.id is None:
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        # Build the input property bag. __new__ bypasses InstanceArgs.__init__
        # validation; values are written straight into the instance dict.
        __props__ = InstanceArgs.__new__(InstanceArgs)
        __props__.__dict__["advanced_machine_features"] = advanced_machine_features
        __props__.__dict__["can_ip_forward"] = can_ip_forward
        __props__.__dict__["confidential_instance_config"] = confidential_instance_config
        __props__.__dict__["deletion_protection"] = deletion_protection
        __props__.__dict__["description"] = description
        __props__.__dict__["disks"] = disks
        __props__.__dict__["display_device"] = display_device
        __props__.__dict__["erase_windows_vss_signature"] = erase_windows_vss_signature
        __props__.__dict__["guest_accelerators"] = guest_accelerators
        __props__.__dict__["hostname"] = hostname
        __props__.__dict__["labels"] = labels
        __props__.__dict__["machine_type"] = machine_type
        __props__.__dict__["metadata"] = metadata
        __props__.__dict__["min_cpu_platform"] = min_cpu_platform
        __props__.__dict__["name"] = name
        __props__.__dict__["network_interfaces"] = network_interfaces
        __props__.__dict__["network_performance_config"] = network_performance_config
        __props__.__dict__["params"] = params
        __props__.__dict__["post_key_revocation_action_type"] = post_key_revocation_action_type
        __props__.__dict__["private_ipv6_google_access"] = private_ipv6_google_access
        __props__.__dict__["project"] = project
        __props__.__dict__["request_id"] = request_id
        __props__.__dict__["reservation_affinity"] = reservation_affinity
        __props__.__dict__["resource_policies"] = resource_policies
        __props__.__dict__["scheduling"] = scheduling
        __props__.__dict__["service_accounts"] = service_accounts
        __props__.__dict__["shielded_instance_config"] = shielded_instance_config
        __props__.__dict__["shielded_instance_integrity_policy"] = shielded_instance_integrity_policy
        __props__.__dict__["shielded_vm_config"] = shielded_vm_config
        __props__.__dict__["shielded_vm_integrity_policy"] = shielded_vm_integrity_policy
        __props__.__dict__["source_instance_template"] = source_instance_template
        __props__.__dict__["source_machine_image"] = source_machine_image
        __props__.__dict__["source_machine_image_encryption_key"] = source_machine_image_encryption_key
        __props__.__dict__["tags"] = tags
        __props__.__dict__["zone"] = zone
        # Output-only properties: initialized to None here and resolved by
        # the provider after the create call.
        __props__.__dict__["cpu_platform"] = None
        __props__.__dict__["creation_timestamp"] = None
        __props__.__dict__["fingerprint"] = None
        __props__.__dict__["kind"] = None
        __props__.__dict__["label_fingerprint"] = None
        __props__.__dict__["last_start_timestamp"] = None
        __props__.__dict__["last_stop_timestamp"] = None
        __props__.__dict__["last_suspended_timestamp"] = None
        __props__.__dict__["satisfies_pzs"] = None
        __props__.__dict__["self_link"] = None
        __props__.__dict__["start_restricted"] = None
        __props__.__dict__["status"] = None
        __props__.__dict__["status_message"] = None
    # Register with the engine under the provider's resource type token.
    super(Instance, __self__).__init__(
        'google-native:compute/beta:Instance',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None) -> 'Instance':
    """
    Get an existing Instance resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    All properties are reset to None here; the engine reads the actual state
    from the provider using the supplied ``id``.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    # Force the lookup path by merging the id into the resource options.
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    __props__ = InstanceArgs.__new__(InstanceArgs)

    __props__.__dict__["advanced_machine_features"] = None
    __props__.__dict__["can_ip_forward"] = None
    __props__.__dict__["confidential_instance_config"] = None
    __props__.__dict__["cpu_platform"] = None
    __props__.__dict__["creation_timestamp"] = None
    __props__.__dict__["deletion_protection"] = None
    __props__.__dict__["description"] = None
    __props__.__dict__["disks"] = None
    __props__.__dict__["display_device"] = None
    __props__.__dict__["erase_windows_vss_signature"] = None
    __props__.__dict__["fingerprint"] = None
    __props__.__dict__["guest_accelerators"] = None
    __props__.__dict__["hostname"] = None
    __props__.__dict__["kind"] = None
    __props__.__dict__["label_fingerprint"] = None
    __props__.__dict__["labels"] = None
    __props__.__dict__["last_start_timestamp"] = None
    __props__.__dict__["last_stop_timestamp"] = None
    __props__.__dict__["last_suspended_timestamp"] = None
    __props__.__dict__["machine_type"] = None
    __props__.__dict__["metadata"] = None
    __props__.__dict__["min_cpu_platform"] = None
    __props__.__dict__["name"] = None
    __props__.__dict__["network_interfaces"] = None
    __props__.__dict__["network_performance_config"] = None
    __props__.__dict__["params"] = None
    __props__.__dict__["post_key_revocation_action_type"] = None
    __props__.__dict__["private_ipv6_google_access"] = None
    __props__.__dict__["reservation_affinity"] = None
    __props__.__dict__["resource_policies"] = None
    __props__.__dict__["satisfies_pzs"] = None
    __props__.__dict__["scheduling"] = None
    __props__.__dict__["self_link"] = None
    __props__.__dict__["service_accounts"] = None
    __props__.__dict__["shielded_instance_config"] = None
    __props__.__dict__["shielded_instance_integrity_policy"] = None
    __props__.__dict__["shielded_vm_config"] = None
    __props__.__dict__["shielded_vm_integrity_policy"] = None
    __props__.__dict__["source_machine_image"] = None
    __props__.__dict__["source_machine_image_encryption_key"] = None
    __props__.__dict__["start_restricted"] = None
    __props__.__dict__["status"] = None
    __props__.__dict__["status_message"] = None
    __props__.__dict__["tags"] = None
    __props__.__dict__["zone"] = None
    return Instance(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="advancedMachineFeatures")
def advanced_machine_features(self) -> pulumi.Output['outputs.AdvancedMachineFeaturesResponse']:
    """
    Controls for advanced machine-related behavior features.
    """
    return pulumi.get(self, "advanced_machine_features")

@property
@pulumi.getter(name="canIpForward")
def can_ip_forward(self) -> pulumi.Output[bool]:
    """
    Allows this instance to send and receive packets with non-matching destination or source IPs. This is required if you plan to use this instance to forward routes. For more information, see Enabling IP Forwarding .
    """
    return pulumi.get(self, "can_ip_forward")

@property
@pulumi.getter(name="confidentialInstanceConfig")
def confidential_instance_config(self) -> pulumi.Output['outputs.ConfidentialInstanceConfigResponse']:
    """
    Confidential-instance settings for this instance. NOTE(review): no upstream
    description was generated for this field; presumably the Confidential VM
    configuration — confirm against the Compute Engine API reference.
    """
    return pulumi.get(self, "confidential_instance_config")

@property
@pulumi.getter(name="cpuPlatform")
def cpu_platform(self) -> pulumi.Output[str]:
    """
    The CPU platform used by this instance.
    """
    return pulumi.get(self, "cpu_platform")

@property
@pulumi.getter(name="creationTimestamp")
def creation_timestamp(self) -> pulumi.Output[str]:
    """
    Creation timestamp in RFC3339 text format.
    """
    return pulumi.get(self, "creation_timestamp")

@property
@pulumi.getter(name="deletionProtection")
def deletion_protection(self) -> pulumi.Output[bool]:
    """
    Whether the resource should be protected against deletion.
    """
    return pulumi.get(self, "deletion_protection")

@property
@pulumi.getter
def description(self) -> pulumi.Output[str]:
    """
    An optional description of this resource. Provide this property when you create the resource.
    """
    return pulumi.get(self, "description")

@property
@pulumi.getter
def disks(self) -> pulumi.Output[Sequence['outputs.AttachedDiskResponse']]:
    """
    Array of disks associated with this instance. Persistent disks must be created before you can assign them.
    """
    return pulumi.get(self, "disks")

@property
@pulumi.getter(name="displayDevice")
def display_device(self) -> pulumi.Output['outputs.DisplayDeviceResponse']:
    """
    Enables display device for the instance.
    """
    return pulumi.get(self, "display_device")

@property
@pulumi.getter(name="eraseWindowsVssSignature")
def erase_windows_vss_signature(self) -> pulumi.Output[bool]:
    """
    Specifies whether the disks restored from source snapshots or source machine image should erase Windows specific VSS signature.
    """
    return pulumi.get(self, "erase_windows_vss_signature")

@property
@pulumi.getter
def fingerprint(self) -> pulumi.Output[str]:
    """
    Specifies a fingerprint for this resource, which is essentially a hash of the instance's contents and used for optimistic locking. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update the instance. You must always provide an up-to-date fingerprint hash in order to update the instance. To see the latest fingerprint, make get() request to the instance.
    """
    return pulumi.get(self, "fingerprint")

@property
@pulumi.getter(name="guestAccelerators")
def guest_accelerators(self) -> pulumi.Output[Sequence['outputs.AcceleratorConfigResponse']]:
    """
    A list of the type and count of accelerator cards attached to the instance.
    """
    return pulumi.get(self, "guest_accelerators")

@property
@pulumi.getter
def hostname(self) -> pulumi.Output[str]:
    """
    Specifies the hostname of the instance. The specified hostname must be RFC1035 compliant. If hostname is not specified, the default hostname is [INSTANCE_NAME].c.[PROJECT_ID].internal when using the global DNS, and [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using zonal DNS.
    """
    return pulumi.get(self, "hostname")

@property
@pulumi.getter
def kind(self) -> pulumi.Output[str]:
    """
    Type of the resource. Always compute#instance for instances.
    """
    return pulumi.get(self, "kind")

@property
@pulumi.getter(name="labelFingerprint")
def label_fingerprint(self) -> pulumi.Output[str]:
    """
    A fingerprint for this request, which is essentially a hash of the label's contents and used for optimistic locking. The fingerprint is initially generated by Compute Engine and changes after every request to modify or update labels. You must always provide an up-to-date fingerprint hash in order to update or change labels. To see the latest fingerprint, make get() request to the instance.
    """
    return pulumi.get(self, "label_fingerprint")
@property
@pulumi.getter
def labels(self) -> pulumi.Output[Mapping[str, str]]:
    """
    Labels to apply to this instance. These can be later modified by the setLabels method.
    """
    return pulumi.get(self, "labels")

@property
@pulumi.getter(name="lastStartTimestamp")
def last_start_timestamp(self) -> pulumi.Output[str]:
    """
    Last start timestamp in RFC3339 text format.
    """
    return pulumi.get(self, "last_start_timestamp")

@property
@pulumi.getter(name="lastStopTimestamp")
def last_stop_timestamp(self) -> pulumi.Output[str]:
    """
    Last stop timestamp in RFC3339 text format.
    """
    return pulumi.get(self, "last_stop_timestamp")

@property
@pulumi.getter(name="lastSuspendedTimestamp")
def last_suspended_timestamp(self) -> pulumi.Output[str]:
    """
    Last suspended timestamp in RFC3339 text format.
    """
    return pulumi.get(self, "last_suspended_timestamp")

@property
@pulumi.getter(name="machineType")
def machine_type(self) -> pulumi.Output[str]:
    """
    Full or partial URL of the machine type resource to use for this instance, in the format: zones/zone/machineTypes/machine-type. This is provided by the client when the instance is created. For example, the following is a valid partial url to a predefined machine type: zones/us-central1-f/machineTypes/n1-standard-1 To create a custom machine type, provide a URL to a machine type in the following format, where CPUS is 1 or an even number up to 32 (2, 4, 6, ... 24, etc), and MEMORY is the total memory for this instance. Memory must be a multiple of 256 MB and must be supplied in MB (e.g. 5 GB of memory is 5120 MB): zones/zone/machineTypes/custom-CPUS-MEMORY For example: zones/us-central1-f/machineTypes/custom-4-5120 For a full list of restrictions, read the Specifications for custom machine types.
    """
    return pulumi.get(self, "machine_type")

@property
@pulumi.getter
def metadata(self) -> pulumi.Output['outputs.MetadataResponse']:
    """
    The metadata key/value pairs assigned to this instance. This includes custom metadata and predefined keys.
    """
    return pulumi.get(self, "metadata")

@property
@pulumi.getter(name="minCpuPlatform")
def min_cpu_platform(self) -> pulumi.Output[str]:
    """
    Specifies a minimum CPU platform for the VM instance. Applicable values are the friendly names of CPU platforms, such as minCpuPlatform: "Intel Haswell" or minCpuPlatform: "Intel Sandy Bridge".
    """
    return pulumi.get(self, "min_cpu_platform")

@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """
    The name of the resource, provided by the client when initially creating the resource. The resource name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
    """
    return pulumi.get(self, "name")

@property
@pulumi.getter(name="networkInterfaces")
def network_interfaces(self) -> pulumi.Output[Sequence['outputs.NetworkInterfaceResponse']]:
    """
    An array of network configurations for this instance. These specify how interfaces are configured to interact with other network services, such as connecting to the internet. Multiple interfaces are supported per instance.
    """
    return pulumi.get(self, "network_interfaces")

@property
@pulumi.getter(name="networkPerformanceConfig")
def network_performance_config(self) -> pulumi.Output['outputs.NetworkPerformanceConfigResponse']:
    """
    Network-performance settings for this instance. NOTE(review): no upstream
    description was generated for this field — confirm semantics against the
    Compute Engine API reference.
    """
    return pulumi.get(self, "network_performance_config")

@property
@pulumi.getter
def params(self) -> pulumi.Output['outputs.InstanceParamsResponse']:
    """
    Input only. [Input Only] Additional params passed with the request, but not persisted as part of resource payload.
    """
    return pulumi.get(self, "params")

@property
@pulumi.getter(name="postKeyRevocationActionType")
def post_key_revocation_action_type(self) -> pulumi.Output[str]:
    """
    PostKeyRevocationActionType of the instance.
    """
    return pulumi.get(self, "post_key_revocation_action_type")

@property
@pulumi.getter(name="privateIpv6GoogleAccess")
def private_ipv6_google_access(self) -> pulumi.Output[str]:
    """
    The private IPv6 google access type for the VM. If not specified, use INHERIT_FROM_SUBNETWORK as default.
    """
    return pulumi.get(self, "private_ipv6_google_access")

@property
@pulumi.getter(name="reservationAffinity")
def reservation_affinity(self) -> pulumi.Output['outputs.ReservationAffinityResponse']:
    """
    Specifies the reservations that this instance can consume from.
    """
    return pulumi.get(self, "reservation_affinity")

@property
@pulumi.getter(name="resourcePolicies")
def resource_policies(self) -> pulumi.Output[Sequence[str]]:
    """
    Resource policies applied to this instance.
    """
    return pulumi.get(self, "resource_policies")
@property
@pulumi.getter(name="satisfiesPzs")
def satisfies_pzs(self) -> pulumi.Output[bool]:
    """
    Reserved for future use.
    """
    return pulumi.get(self, "satisfies_pzs")

@property
@pulumi.getter
def scheduling(self) -> pulumi.Output['outputs.SchedulingResponse']:
    """
    Sets the scheduling options for this instance.
    """
    return pulumi.get(self, "scheduling")

@property
@pulumi.getter(name="selfLink")
def self_link(self) -> pulumi.Output[str]:
    """
    Server-defined URL for this resource.
    """
    return pulumi.get(self, "self_link")

@property
@pulumi.getter(name="serviceAccounts")
def service_accounts(self) -> pulumi.Output[Sequence['outputs.ServiceAccountResponse']]:
    """
    A list of service accounts, with their specified scopes, authorized for this instance. Only one service account per VM instance is supported. Service accounts generate access tokens that can be accessed through the metadata server and used to authenticate applications on the instance. See Service Accounts for more information.
    """
    return pulumi.get(self, "service_accounts")

@property
@pulumi.getter(name="shieldedInstanceConfig")
def shielded_instance_config(self) -> pulumi.Output['outputs.ShieldedInstanceConfigResponse']:
    """
    Shielded-instance settings for this instance. NOTE(review): no upstream
    description was generated for this field; the deprecated shielded_vm_config
    property below refers callers here — confirm semantics against the Compute
    Engine API reference.
    """
    return pulumi.get(self, "shielded_instance_config")

@property
@pulumi.getter(name="shieldedInstanceIntegrityPolicy")
def shielded_instance_integrity_policy(self) -> pulumi.Output['outputs.ShieldedInstanceIntegrityPolicyResponse']:
    """
    Shielded-instance integrity policy for this instance. NOTE(review): no
    upstream description was generated for this field; the deprecated
    shielded_vm_integrity_policy property below refers callers here.
    """
    return pulumi.get(self, "shielded_instance_integrity_policy")

@property
@pulumi.getter(name="shieldedVmConfig")
def shielded_vm_config(self) -> pulumi.Output['outputs.ShieldedVmConfigResponse']:
    """
    Deprecating, please use shielded_instance_config.
    """
    return pulumi.get(self, "shielded_vm_config")

@property
@pulumi.getter(name="shieldedVmIntegrityPolicy")
def shielded_vm_integrity_policy(self) -> pulumi.Output['outputs.ShieldedVmIntegrityPolicyResponse']:
    """
    Deprecating, please use shielded_instance_integrity_policy.
    """
    return pulumi.get(self, "shielded_vm_integrity_policy")

@property
@pulumi.getter(name="sourceMachineImage")
def source_machine_image(self) -> pulumi.Output[str]:
    """
    Source machine image
    """
    return pulumi.get(self, "source_machine_image")

@property
@pulumi.getter(name="sourceMachineImageEncryptionKey")
def source_machine_image_encryption_key(self) -> pulumi.Output['outputs.CustomerEncryptionKeyResponse']:
    """
    Source machine image encryption key when creating an instance from a machine image.
    """
    return pulumi.get(self, "source_machine_image_encryption_key")

@property
@pulumi.getter(name="startRestricted")
def start_restricted(self) -> pulumi.Output[bool]:
    """
    Whether a VM has been restricted for start because Compute Engine has detected suspicious activity.
    """
    return pulumi.get(self, "start_restricted")

@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
    """
    The status of the instance. One of the following values: PROVISIONING, STAGING, RUNNING, STOPPING, SUSPENDING, SUSPENDED, REPAIRING, and TERMINATED. For more information about the status of the instance, see Instance life cycle.
    """
    return pulumi.get(self, "status")

@property
@pulumi.getter(name="statusMessage")
def status_message(self) -> pulumi.Output[str]:
    """
    An optional, human-readable explanation of the status.
    """
    return pulumi.get(self, "status_message")

@property
@pulumi.getter
def tags(self) -> pulumi.Output['outputs.TagsResponse']:
    """
    Tags to apply to this instance. Tags are used to identify valid sources or targets for network firewalls and are specified by the client during instance creation. The tags can be later modified by the setTags method. Each tag within the list must comply with RFC1035. Multiple tags can be specified via the 'tags.items' field.
    """
    return pulumi.get(self, "tags")

@property
@pulumi.getter
def zone(self) -> pulumi.Output[str]:
    """
    URL of the zone where the instance resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body.
    """
    return pulumi.get(self, "zone")
| 58.881239
| 852
| 0.703815
| 7,945
| 68,420
| 5.818376
| 0.063436
| 0.0652
| 0.071928
| 0.032881
| 0.855517
| 0.799598
| 0.739546
| 0.695265
| 0.664265
| 0.610747
| 0
| 0.004322
| 0.202002
| 68,420
| 1,161
| 853
| 58.931955
| 0.842341
| 0.338746
| 0
| 0.452
| 1
| 0
| 0.189814
| 0.106688
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161333
| false
| 0.001333
| 0.010667
| 0.016
| 0.282667
| 0.012
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
be33757fc26f8daf229b6ea72f9cb3ec2d25ec28
| 1,253
|
py
|
Python
|
pypedream/expressions/unary.py
|
Nukleon84/pypedream
|
de074e1b21d65eb3a02e1b0719fff1b4c2b7ae92
|
[
"MIT"
] | 9
|
2020-05-17T15:23:33.000Z
|
2021-04-28T19:03:00.000Z
|
pypedream/expressions/unary.py
|
Nukleon84/pypedream
|
de074e1b21d65eb3a02e1b0719fff1b4c2b7ae92
|
[
"MIT"
] | 1
|
2020-07-30T11:53:53.000Z
|
2020-07-30T11:53:53.000Z
|
pypedream/expressions/unary.py
|
Nukleon84/pypedream
|
de074e1b21d65eb3a02e1b0719fff1b4c2b7ae92
|
[
"MIT"
] | null | null | null |
from . basic import UnaryExpression
import math
class Exp(UnaryExpression):
    """Unary exponential node: exp(argument)."""

    def __init__(self, argument):
        super(Exp, self).__init__('exp', argument)

    def fullEvaluate(self):
        # Evaluate the child numerically, then apply the exponential.
        inner_value = self.argument.eval()
        return math.exp(inner_value)

    def diff(self, variable):
        # Chain rule: (exp(u))' = u' * exp(u). Operand order is kept
        # identical to the original in case '*' builds an expression tree.
        inner_derivative = self.argument.diff(variable)
        return inner_derivative * self.eval()
class Ln(UnaryExpression):
    """Unary natural-logarithm node: ln(argument)."""

    def __init__(self, argument):
        super(Ln, self).__init__('ln', argument)

    def fullEvaluate(self):
        inner_value = self.argument.eval()
        return math.log(inner_value)

    def diff(self, variable):
        # Chain rule: (ln(u))' = u' / u. Operand order is kept identical
        # to the original in case '*' builds an expression tree.
        reciprocal = 1 / self.argument.eval()
        return reciprocal * self.argument.diff(variable)
class Sqrt(UnaryExpression):
    """Unary square-root node: sqrt(argument)."""

    def __init__(self, argument):
        super(Sqrt, self).__init__('sqrt', argument)

    def fullEvaluate(self):
        return math.sqrt(self.argument.eval())

    def diff(self, variable):
        # Chain rule: d/dx sqrt(u) = u' / (2 * sqrt(u)).
        # BUG FIX: the original returned u' / (2 * u) — it divided by the
        # argument itself instead of its square root.
        return 1.0/(2.0*math.sqrt(self.argument.eval()))*self.argument.diff(variable)
class Par(UnaryExpression):
    """Grouping (parenthesis) node: forwards evaluation and differentiation
    to its argument unchanged, and renders it wrapped in square brackets."""

    def __init__(self, argument):
        # Empty operator name: the node adds no operation of its own.
        super(Par,self).__init__('',argument)

    def fullEvaluate(self):
        return self.argument.eval()

    def diff(self, variable):
        # d/dx [u] = u' — grouping does not affect the derivative.
        return self.argument.diff(variable)

    def print(self):
        return f"[{self.argument.print()}]"
| 24.568627
| 82
| 0.644852
| 148
| 1,253
| 5.243243
| 0.175676
| 0.231959
| 0.123711
| 0.134021
| 0.778351
| 0.735825
| 0.391753
| 0.391753
| 0.275773
| 0.167526
| 0
| 0.005123
| 0.221069
| 1,253
| 51
| 83
| 24.568627
| 0.789959
| 0
| 0
| 0.375
| 0
| 0
| 0.027113
| 0.019936
| 0
| 0
| 0
| 0
| 0
| 1
| 0.40625
| false
| 0
| 0.0625
| 0.28125
| 0.875
| 0.0625
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
be433c8540502ebc11dba5d79309d71f84542f82
| 4,157
|
py
|
Python
|
tests/test_gameplay.py
|
michael-gracie/camelup
|
fdf8295cbac179e254628d1595f3ec6ccc67edd3
|
[
"MIT"
] | null | null | null |
tests/test_gameplay.py
|
michael-gracie/camelup
|
fdf8295cbac179e254628d1595f3ec6ccc67edd3
|
[
"MIT"
] | 3
|
2019-07-20T16:10:13.000Z
|
2019-09-21T17:54:30.000Z
|
tests/test_gameplay.py
|
michael-gracie/camelup
|
fdf8295cbac179e254628d1595f3ec6ccc67edd3
|
[
"MIT"
] | 1
|
2019-09-25T23:39:37.000Z
|
2019-09-25T23:39:37.000Z
|
"""Tests for gameplay"""
from copy import deepcopy
import pytest
import camelup.gameplay as gameplay
# Shared board fixture used by the tests below: for each camel, its stack
# height, the square ("space") it occupies, and whether its die still needs
# to be rolled. Tests that mutate state use the deep-copy fixture instead.
camel_dict = {
    "red": {"height": 1, "space": 1, "need_roll": True},
    "blue": {"height": 2, "space": 1, "need_roll": True},
    "green": {"height": 1, "space": 3, "need_roll": True},
    "yellow": {"height": 1, "space": 4, "need_roll": True},
    "white": {"height": 2, "space": 4, "need_roll": True},
}
# Special tiles keyed by square number: a "block" tile on 5 and a "skip"
# tile on 2, both placed by player 1.
tiles_dict = {5: {"type": "block", "player": 1}, 2: {"type": "skip", "player": 1}}
@pytest.fixture
def camel_dict_copy():
    """Fresh deep copy of the shared camel_dict so a test can mutate it freely."""
    return deepcopy(camel_dict)
# (space, height, expected (count, ?, camel names)) cases for camels_to_move.
to_move = [
    (4, 2, (2, 1, ["white"])),
    (4, 1, (1, 2, ["white", "yellow"])),
    (3, 1, (1, 1, ["green"])),
]


@pytest.mark.parametrize("space,height,expected", to_move)
def test_camels_to_move(space, height, expected):
    """camels_to_move returns the expected counts and camel names."""
    # Call once instead of three times as in the original.
    result = gameplay.camels_to_move(camel_dict, space, height)
    assert result[0] == expected[0]
    assert result[1] == expected[1]
    # BUG FIX: list.sort() returns None, so the original compared
    # None == None (always True) and the name list was never checked.
    # sorted() returns the sorted list, giving an order-insensitive compare.
    assert sorted(result[2]) == sorted(expected[2])
# (destination, expected (count, camel names)) cases for camels_in_dest.
in_dest = [(4, (2, ["white", "yellow"])), (3, (1, ["green"])), (2, (0, []))]


@pytest.mark.parametrize("destination,expected", in_dest)
def test_camels_in_dest(destination, expected):
    """camels_in_dest returns the expected count and camel names."""
    result = gameplay.camels_in_dest(camel_dict, destination)
    assert result[0] == expected[0]
    # BUG FIX: list.sort() returns None, so the original assertion compared
    # None == None (always True). sorted() performs the intended
    # order-insensitive comparison of the name lists.
    assert sorted(result[1]) == sorted(expected[1])
def test_block_move_reg(camel_dict_copy):
    """Blocked move onto an occupied square: movers slide under the stack."""
    gameplay.block_move(camel_dict_copy, ["red", "blue"], ["white", "yellow"], 4, 1, 2)
    assert camel_dict_copy["white"]["space"] == 4
    assert camel_dict_copy["yellow"]["space"] == 4
    assert camel_dict_copy["blue"]["space"] == 4
    assert camel_dict_copy["red"]["space"] == 4
    # Heights from top to bottom: white, yellow, blue, red.
    assert camel_dict_copy["white"]["height"] == 4
    assert camel_dict_copy["yellow"]["height"] == 3
    assert camel_dict_copy["blue"]["height"] == 2
    assert camel_dict_copy["red"]["height"] == 1

def test_block_move_same_square(camel_dict_copy):
    """Blocked move where movers and destination stack are the same camels."""
    gameplay.block_move(
        camel_dict_copy, ["white", "yellow"], ["white", "yellow"], 4, 1, 2
    )
    assert camel_dict_copy["white"]["space"] == 4
    assert camel_dict_copy["yellow"]["space"] == 4
    assert camel_dict_copy["white"]["height"] == 2
    assert camel_dict_copy["yellow"]["height"] == 1

def test_block_move_same_square_only_one(camel_dict_copy):
    """Blocked move of one camel within its own square: it drops below yellow."""
    gameplay.block_move(camel_dict_copy, ["white"], ["white", "yellow"], 4, 2, 1)
    assert camel_dict_copy["white"]["space"] == 4
    assert camel_dict_copy["yellow"]["space"] == 4
    assert camel_dict_copy["white"]["height"] == 1
    assert camel_dict_copy["yellow"]["height"] == 2

def test_block_move_open_square(camel_dict_copy):
    """Blocked move onto an empty square: camel lands at height 1."""
    gameplay.block_move(camel_dict_copy, ["white"], [], 5, 2, 1)
    assert camel_dict_copy["white"]["space"] == 5
    assert camel_dict_copy["white"]["height"] == 1
def test_reg_move_reg_square(camel_dict_copy):
    """Regular move onto an occupied square: movers stack on top."""
    gameplay.reg_move(camel_dict_copy, ["red", "blue"], 4, 1, 2)
    assert camel_dict_copy["blue"]["space"] == 4
    assert camel_dict_copy["red"]["space"] == 4
    # blue ends above red, both above the two camels already on square 4.
    assert camel_dict_copy["blue"]["height"] == 4
    assert camel_dict_copy["red"]["height"] == 3

def test_reg_move_open_square(camel_dict_copy):
    """Regular move onto an empty square: camel lands at height 1."""
    gameplay.reg_move(camel_dict_copy, ["blue"], 2, 2, 0)
    assert camel_dict_copy["blue"]["space"] == 2
    assert camel_dict_copy["blue"]["height"] == 1
def test_move_with_block(camel_dict_copy):
    """move() hitting the block tile on square 5: returns 5, stack lands on 4."""
    assert gameplay.move(camel_dict_copy, tiles_dict, "red", 4) == 5
    assert camel_dict_copy["white"]["space"] == 4
    assert camel_dict_copy["yellow"]["space"] == 4
    assert camel_dict_copy["blue"]["space"] == 4
    assert camel_dict_copy["red"]["space"] == 4
    assert camel_dict_copy["white"]["height"] == 4
    assert camel_dict_copy["yellow"]["height"] == 3
    assert camel_dict_copy["blue"]["height"] == 2
    assert camel_dict_copy["red"]["height"] == 1

def test_move_with_skip(camel_dict_copy):
    """move() hitting the skip tile on square 2: returns 2, camel ends on 3."""
    assert gameplay.move(camel_dict_copy, tiles_dict, "blue", 1) == 2
    assert camel_dict_copy["blue"]["space"] == 3
    assert camel_dict_copy["blue"]["height"] == 2
| 34.641667
| 88
| 0.652153
| 599
| 4,157
| 4.235392
| 0.09182
| 0.205755
| 0.261332
| 0.254631
| 0.754434
| 0.715018
| 0.632243
| 0.611352
| 0.519117
| 0.447773
| 0
| 0.02796
| 0.156844
| 4,157
| 119
| 89
| 34.932773
| 0.695863
| 0.00433
| 0
| 0.313953
| 0
| 0
| 0.157755
| 0.005081
| 0
| 0
| 0
| 0
| 0.476744
| 1
| 0.127907
| false
| 0
| 0.034884
| 0.011628
| 0.174419
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
be61449e19e35694b4df1ab9a51273c48320f5c5
| 41
|
py
|
Python
|
__main__.py
|
deepy/arpg
|
b29cc0dbc94be9e6511c85bfcf87df6b2ab72506
|
[
"BSD-3-Clause"
] | null | null | null |
__main__.py
|
deepy/arpg
|
b29cc0dbc94be9e6511c85bfcf87df6b2ab72506
|
[
"BSD-3-Clause"
] | null | null | null |
__main__.py
|
deepy/arpg
|
b29cc0dbc94be9e6511c85bfcf87df6b2ab72506
|
[
"BSD-3-Clause"
] | null | null | null |
from arpg.application import main

# Guard the entry point so that importing this module (e.g. by tooling or
# tests) does not launch the application. Running the package with
# `python -m` still sets __name__ to "__main__", so behavior there is
# unchanged.
if __name__ == "__main__":
    main()
| 20.5
| 34
| 0.804878
| 6
| 41
| 5.5
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 41
| 2
| 35
| 20.5
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
be65539508e7150c1ae1cb2050cdf9d6dca9774d
| 147
|
py
|
Python
|
ex009.py
|
felipesch92/PythonExercicios
|
73edcbde6beaabcfc86af3dd6e58473f1eecabd3
|
[
"MIT"
] | null | null | null |
ex009.py
|
felipesch92/PythonExercicios
|
73edcbde6beaabcfc86af3dd6e58473f1eecabd3
|
[
"MIT"
] | null | null | null |
ex009.py
|
felipesch92/PythonExercicios
|
73edcbde6beaabcfc86af3dd6e58473f1eecabd3
|
[
"MIT"
] | null | null | null |
# Print the multiplication table (1..10) for a user-supplied integer.
n = int(input('Informe um número: '))
print('-'*12)
# Idiom fix: a for-loop over range() replaces the manual while-counter
# (x = 1 ... x = x + 1), producing exactly the same ten lines.
for x in range(1, 11):
    print('{} x {:2} = {}'.format(n, x, n*x))
print('-'*12)
| 18.375
| 45
| 0.462585
| 26
| 147
| 2.615385
| 0.538462
| 0.058824
| 0.205882
| 0.264706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.080357
| 0.238095
| 147
| 7
| 46
| 21
| 0.526786
| 0
| 0
| 0.285714
| 0
| 0
| 0.238095
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
be76964ecbc69c33caefabd70e4bb134021e5977
| 20,046
|
py
|
Python
|
inventories/dynamic/leaf_switches.py
|
takamitsu-iida/ansible-on-wsl
|
667c1f70eb55f078e4200de7298ae490858ef472
|
[
"MIT"
] | null | null | null |
inventories/dynamic/leaf_switches.py
|
takamitsu-iida/ansible-on-wsl
|
667c1f70eb55f078e4200de7298ae490858ef472
|
[
"MIT"
] | 5
|
2021-07-05T09:25:12.000Z
|
2021-07-10T04:25:52.000Z
|
inventories/dynamic/leaf_switches.py
|
takamitsu-iida/ansible-on-wsl
|
667c1f70eb55f078e4200de7298ae490858ef472
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
__author__ = "takamitsu-iida"
__version__ = "1.0"
__date__ = "2021/07/21"
import argparse
import json
import os
import re
import sys
#
# ACI Spine switch
# show lldp neighbors detail
#
ACI_SHOW_LLDP_NEIGHBORS_DETAIL = """
nw-00-03-06-00# show lldp neighbors detail
Capability codes:
(R) Router, (B) Bridge, (T) Telephone, (C) DOCSIS Cable Device
(W) WLAN Access Point, (P) Repeater, (S) Station, (O) Other
Device ID Local Intf Hold-time Capability Port ID
Chassis id: 500f.8079.e2e3
Port id: Eth1/49
Local Port id: Eth1/1
Port Description: topology/pod-1/paths-3801/pathep-[eth1/49]
System Name: nw-00-01-19-00
System Description: topology/pod-1/node-3801
Time remaining: 91 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.1
Vlan ID: not advertised
Chassis id: 707d.b986.a1af
Port id: Eth1/49
Local Port id: Eth1/2
Port Description: topology/pod-1/paths-3802/pathep-[eth1/49]
System Name: nw-00-01-20-00
System Description: topology/pod-1/node-3802
Time remaining: 109 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.2
Vlan ID: not advertised
Chassis id: 7070.8b8c.f8bf
Port id: Eth1/49
Local Port id: Eth1/3
Port Description: topology/pod-1/paths-3803/pathep-[eth1/49]
System Name: nw-00-02-17-00
System Description: topology/pod-1/node-3803
Time remaining: 112 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.3
Vlan ID: not advertised
Chassis id: 707d.b986.f930
Port id: Eth1/49
Local Port id: Eth1/4
Port Description: topology/pod-1/paths-3804/pathep-[eth1/49]
System Name: nw-00-02-18-00
System Description: topology/pod-1/node-3804
Time remaining: 103 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.4
Vlan ID: not advertised
Chassis id: 7070.8bf4.b169
Port id: Eth1/49
Local Port id: Eth1/5
Port Description: topology/pod-1/paths-3805/pathep-[eth1/49]
System Name: nw-00-02-33-00
System Description: topology/pod-1/node-3805
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.5
Vlan ID: not advertised
Chassis id: 3890.a58d.4b97
Port id: Eth1/49
Local Port id: Eth1/6
Port Description: topology/pod-1/paths-3806/pathep-[eth1/49]
System Name: nw-00-02-34-00
System Description: topology/pod-1/node-3806
Time remaining: 100 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.6
Vlan ID: not advertised
Chassis id: 7070.8b8c.f7cf
Port id: Eth1/49
Local Port id: Eth1/7
Port Description: topology/pod-1/paths-101/pathep-[eth1/49]
System Name: nw-01-01-16-00
System Description: topology/pod-1/node-101
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.1
Vlan ID: not advertised
Chassis id: 707d.b986.f7ef
Port id: Eth1/49
Local Port id: Eth1/8
Port Description: topology/pod-1/paths-102/pathep-[eth1/49]
System Name: nw-01-01-17-00
System Description: topology/pod-1/node-102
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.2
Vlan ID: not advertised
Chassis id: 380e.4d8f.b229
Port id: Eth1/25
Local Port id: Eth1/9
Port Description: topology/pod-1/paths-171/pathep-[eth1/25]
System Name: nw-01-17-36-00
System Description: topology/pod-1/node-171
Time remaining: 114 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.71
Vlan ID: not advertised
Chassis id: 380e.4d14.ab79
Port id: Eth1/25
Local Port id: Eth1/10
Port Description: topology/pod-1/paths-172/pathep-[eth1/25]
System Name: nw-01-17-35-00
System Description: topology/pod-1/node-172
Time remaining: 96 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.72
Vlan ID: not advertised
Chassis id: 707d.b986.8757
Port id: Eth1/25
Local Port id: Eth1/11
Port Description: topology/pod-1/paths-181/pathep-[eth1/25]
System Name: nw-01-04-20-00
System Description: topology/pod-1/node-181
Time remaining: 111 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.81
Vlan ID: not advertised
Chassis id: 380e.4d14.ad59
Port id: Eth1/25
Local Port id: Eth1/12
Port Description: topology/pod-1/paths-182/pathep-[eth1/25]
System Name: nw-01-04-19-00
System Description: topology/pod-1/node-182
Time remaining: 113 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.82
Vlan ID: not advertised
Chassis id: 707d.b99d.fcd1
Port id: Eth1/49
Local Port id: Eth1/13
Port Description: topology/pod-1/paths-3911/pathep-[eth1/49]
System Name: nw-00-03-30-00
System Description: topology/pod-1/node-3911
Time remaining: 96 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.118
Vlan ID: not advertised
Chassis id: 707d.b99e.26b1
Port id: Eth1/49
Local Port id: Eth1/14
Port Description: topology/pod-1/paths-3912/pathep-[eth1/49]
System Name: nw-00-03-31-00
System Description: topology/pod-1/node-3912
Time remaining: 101 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.119
Vlan ID: not advertised
Chassis id: 707d.b99d.ff01
Port id: Eth1/49
Local Port id: Eth1/15
Port Description: topology/pod-1/paths-3913/pathep-[eth1/49]
System Name: nw-00-03-32-00
System Description: topology/pod-1/node-3913
Time remaining: 94 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.120
Vlan ID: not advertised
Chassis id: 707d.b99e.2701
Port id: Eth1/49
Local Port id: Eth1/16
Port Description: topology/pod-1/paths-3914/pathep-[eth1/49]
System Name: nw-00-03-33-00
System Description: topology/pod-1/node-3914
Time remaining: 106 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.121
Vlan ID: not advertised
Chassis id: 6cb2.ae3e.bb93
Port id: Eth1/49
Local Port id: Eth1/18
Port Description: topology/pod-1/paths-3831/pathep-[eth1/49]
System Name: nw-00-01-29-00
System Description: topology/pod-1/node-3831
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.31
Vlan ID: not advertised
Chassis id: 6cb2.ae3e.b7d3
Port id: Eth1/49
Local Port id: Eth1/19
Port Description: topology/pod-1/paths-3832/pathep-[eth1/49]
System Name: nw-00-01-30-00
System Description: topology/pod-1/node-3832
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.32
Vlan ID: not advertised
Chassis id: 6cb2.ae3e.bbe3
Port id: Eth1/49
Local Port id: Eth1/20
Port Description: topology/pod-1/paths-131/pathep-[eth1/49]
System Name: nw-01-08-34-00
System Description: topology/pod-1/node-131
Time remaining: 104 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.31
Vlan ID: not advertised
Chassis id: 6cb2.ae3e.baf3
Port id: Eth1/49
Local Port id: Eth1/21
Port Description: topology/pod-1/paths-132/pathep-[eth1/49]
System Name: nw-01-08-35-00
System Description: topology/pod-1/node-132
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.32
Vlan ID: not advertised
Chassis id: b4de.31db.2c41
Port id: Eth1/49
Local Port id: Eth1/22
Port Description: topology/pod-1/paths-3833/pathep-[eth1/49]
System Name: nw-00-09-25-00
System Description: topology/pod-1/node-3833
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.33
Vlan ID: not advertised
Chassis id: b4de.31db.39b1
Port id: Eth1/49
Local Port id: Eth1/23
Port Description: topology/pod-1/paths-3834/pathep-[eth1/49]
System Name: nw-00-09-26-00
System Description: topology/pod-1/node-3834
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.34
Vlan ID: not advertised
Chassis id: 6cb2.ae7b.3929
Port id: Eth1/31
Local Port id: Eth1/25
Port Description: topology/pod-1/paths-3101/pathep-[eth1/31]
System Name: nw-31-08-41-00
System Description: topology/pod-1/node-3101
Time remaining: 116 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.255.1
Vlan ID: not advertised
Chassis id: 6cb2.ae02.6f79
Port id: Eth1/31
Local Port id: Eth1/26
Port Description: topology/pod-1/paths-3102/pathep-[eth1/31]
System Name: nw-31-08-42-00
System Description: topology/pod-1/node-3102
Time remaining: 89 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.255.2
Vlan ID: not advertised
Chassis id: 6cb2.ae02.8b3b
Port id: Eth1/31
Local Port id: Eth1/27
Port Description: topology/pod-1/paths-3103/pathep-[eth1/31]
System Name: nw-31-09-36-00
System Description: topology/pod-1/node-3103
Time remaining: 100 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.255.3
Vlan ID: not advertised
Chassis id: 6cb2.ae02.a52b
Port id: Eth1/31
Local Port id: Eth1/28
Port Description: topology/pod-1/paths-3104/pathep-[eth1/31]
System Name: nw-31-09-37-00
System Description: topology/pod-1/node-3104
Time remaining: 97 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.255.4
Vlan ID: not advertised
Chassis id: 00ee.ab3a.6283
Port id: Eth1/53
Local Port id: Eth1/29
Port Description: topology/pod-1/paths-3808/pathep-[eth1/53]
System Name: nw-00-12-29-00
System Description: topology/pod-1/node-3808
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.8
Vlan ID: not advertised
Chassis id: 00ee.ab3a.65f3
Port id: Eth1/53
Local Port id: Eth1/30
Port Description: topology/pod-1/paths-3807/pathep-[eth1/53]
System Name: nw-00-12-30-00
System Description: topology/pod-1/node-3807
Time remaining: 105 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.7
Vlan ID: not advertised
Chassis id: 2c4f.5256.df97
Port id: Eth1/31
Local Port id: Eth1/31
Port Description: topology/pod-1/paths-173/pathep-[eth1/31]
System Name: nw-01-19-35-00_H08
System Description: topology/pod-1/node-173
Time remaining: 93 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.73
Vlan ID: not advertised
Chassis id: 70ea.1a21.b12b
Port id: Eth1/31
Local Port id: Eth1/32
Port Description: topology/pod-1/paths-174/pathep-[eth1/31]
System Name: nw-01-19-36-00_H08
System Description: topology/pod-1/node-174
Time remaining: 89 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.74
Vlan ID: not advertised
Chassis id: ac3a.6759.9895
Port id: Eth1/53
Local Port id: Eth2/1
Port Description: topology/pod-1/paths-103/pathep-[eth1/53]
System Name: nw-01-34-28-00
System Description: topology/pod-1/node-103
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.3
Vlan ID: not advertised
Chassis id: ac3a.67d2.0f05
Port id: Eth1/53
Local Port id: Eth2/2
Port Description: topology/pod-1/paths-104/pathep-[eth1/53]
System Name: nw-01-34-29-00
System Description: topology/pod-1/node-104
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.4
Vlan ID: not advertised
Chassis id: c4b2.39a7.4c45
Port id: Eth1/53
Local Port id: Eth2/3
Port Description: topology/pod-1/paths-3809/pathep-[eth1/53]
System Name: nw-00-12-32-00
System Description: topology/pod-1/node-3809
Time remaining: 99 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.9
Vlan ID: not advertised
Chassis id: 4ce1.7548.05d5
Port id: Eth1/53
Local Port id: Eth2/4
Port Description: topology/pod-1/paths-3810/pathep-[eth1/53]
System Name: nw-00-12-33-00
System Description: topology/pod-1/node-3810
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.7.10
Vlan ID: not advertised
Chassis id: ac3a.67d2.1bd5
Port id: Eth1/53
Local Port id: Eth2/5
Port Description: topology/pod-1/paths-3105/pathep-[eth1/53]
System Name: nw-31-61-32-00
System Description: topology/pod-1/node-3105
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.255.5
Vlan ID: not advertised
Chassis id: ac3a.67d2.1bd6
Port id: Eth1/54
Local Port id: Eth2/6
Port Description: topology/pod-1/paths-3105/pathep-[eth1/54]
System Name: nw-31-61-32-00
System Description: topology/pod-1/node-3105
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.255.5
Vlan ID: not advertised
Chassis id: ac3a.671a.f815
Port id: Eth1/53
Local Port id: Eth2/7
Port Description: topology/pod-1/paths-3106/pathep-[eth1/53]
System Name: nw-31-61-33-00
System Description: topology/pod-1/node-3106
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.255.6
Vlan ID: not advertised
Chassis id: ac3a.671a.f816
Port id: Eth1/54
Local Port id: Eth2/8
Port Description: topology/pod-1/paths-3106/pathep-[eth1/54]
System Name: nw-31-61-33-00
System Description: topology/pod-1/node-3106
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.255.6
Vlan ID: not advertised
Chassis id: ac3a.670d.7def
Port id: Eth1/35
Local Port id: Eth2/9
Port Description: topology/pod-1/paths-2501/pathep-[eth1/35]
System Name: nw-25-01-32-00_H10
System Description: topology/pod-1/node-2501
Time remaining: 114 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.200.118
Vlan ID: not advertised
Chassis id: ac3a.670d.7df0
Port id: Eth1/36
Local Port id: Eth2/10
Port Description: topology/pod-1/paths-2501/pathep-[eth1/36]
System Name: nw-25-01-32-00_H10
System Description: topology/pod-1/node-2501
Time remaining: 114 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.200.118
Vlan ID: not advertised
Chassis id: ac3a.670d.7f27
Port id: Eth1/35
Local Port id: Eth2/11
Port Description: topology/pod-1/paths-2502/pathep-[eth1/35]
System Name: nw-25-01-33-00_H10
System Description: topology/pod-1/node-2502
Time remaining: 114 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.200.119
Vlan ID: not advertised
Chassis id: ac3a.670d.7f28
Port id: Eth1/36
Local Port id: Eth2/12
Port Description: topology/pod-1/paths-2502/pathep-[eth1/36]
System Name: nw-25-01-33-00_H10
System Description: topology/pod-1/node-2502
Time remaining: 114 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.200.119
Vlan ID: not advertised
Chassis id: e8eb.34ea.3e6b
Port id: Eth1/31
Local Port id: Eth2/13
Port Description: topology/pod-1/paths-133/pathep-[eth1/31]
System Name: nw-01-23-26-00
System Description: topology/pod-1/node-133
Time remaining: 97 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.33
Vlan ID: not advertised
Chassis id: e8eb.34ea.3e6c
Port id: Eth1/32
Local Port id: Eth2/14
Port Description: topology/pod-1/paths-133/pathep-[eth1/32]
System Name: nw-01-23-26-00
System Description: topology/pod-1/node-133
Time remaining: 97 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.33
Vlan ID: not advertised
Chassis id: e8eb.3493.a4bf
Port id: Eth1/31
Local Port id: Eth2/15
Port Description: topology/pod-1/paths-134/pathep-[eth1/31]
System Name: nw-01-23-27-00
System Description: topology/pod-1/node-134
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.34
Vlan ID: not advertised
Chassis id: e8eb.3493.a4c0
Port id: Eth1/32
Local Port id: Eth2/16
Port Description: topology/pod-1/paths-134/pathep-[eth1/32]
System Name: nw-01-23-27-00
System Description: topology/pod-1/node-134
Time remaining: 98 seconds
System Capabilities: B, R
Enabled Capabilities: B, R
Management Address: 10.254.15.34
Vlan ID: not advertised
Total entries displayed: 46
nw-00-03-06-00#
"""
class LldpParser:
    """Parse the text output of 'show lldp neighbors detail' (Cisco ACI/NX-OS).

    A neighbor section in the captured output looks like::

        Chassis id: 500f.8079.e2e3
        Port id: Eth1/49
        Local Port id: Eth1/1
        Port Description: topology/pod-1/paths-3801/pathep-[eth1/49]
        System Name: nw-00-01-19-00
        System Description: topology/pod-1/node-3801
        Time remaining: 91 seconds
        System Capabilities: B, R
        Enabled Capabilities: B, R
        Management Address: 10.254.7.1
        Vlan ID: not advertised

    Only the 'System Name' line and the 'Management Address' line that
    follows it are extracted; each neighbor is reported as a dict of the
    form {'sysname': <name>, 'addr': <management address>}.
    """

    def __init__(self) -> None:
        # Compile the patterns once so match_lines() does not pay the
        # regex-cache lookup on every input line (the original stored raw
        # strings despite the "compile regexp" intent).
        self.re_system_name = re.compile(r'^System Name:\s+(?P<sysname>\S+)')
        self.re_mgmt_address = re.compile(r'^Management Address:\s+(?P<addr>\S+)')

    def parse_file(self, filepath: str) -> list:
        """Read *filepath* and return the parsed, de-duplicated neighbor list."""
        with open(filepath, 'r') as f:
            text_str = f.read()
        return self.parse_text(text_str)

    def parse_text(self, text_str: str) -> list:
        """Split *text_str* into lines and return the parsed neighbor list."""
        return self.parse_lines(text_str.splitlines())

    @staticmethod
    def exists(neighbor_list: list, sysname: str):
        """Return the first entry in *neighbor_list* whose 'sysname' equals
        *sysname*, or False when no such entry exists."""
        return next((nbr for nbr in neighbor_list if nbr['sysname'] == sysname), False)

    def parse_lines(self, lines: list) -> list:
        """Return a list of neighbor dicts, keeping only the first occurrence
        of each system name.

        A switch connected over several ports appears once per port in the
        LLDP output; duplicates are dropped here.
        """
        neighbor_entries = []
        for neighbor in self.match_lines(lines):
            sysname = neighbor.get('sysname', '')
            if not LldpParser.exists(neighbor_entries, sysname):
                neighbor_entries.append(neighbor)
        return neighbor_entries

    def match_lines(self, lines: list):
        """Scan *lines* and yield {'sysname': ..., 'addr': ...} dicts.

        The most recent 'System Name' is remembered; a dict is yielded when
        the corresponding 'Management Address' line is reached.
        """
        current_sysname = None
        for line in lines:
            match = self.re_system_name.match(line)
            if match:
                current_sysname = match.group('sysname')
                continue
            match = self.re_mgmt_address.match(line)
            if match:
                yield {'sysname': current_sysname, 'addr': match.group('addr')}
if __name__ == '__main__':

    def here(path=''):
        # Absolute path resolved relative to this script's directory.
        base = os.path.dirname(__file__)
        return os.path.abspath(os.path.join(base, path))

    app_home = here(".")

    GROUP_NAME = "leaf_switches"

    def get_inventory():
        """Build an Ansible dynamic-inventory dict from the captured LLDP text."""
        neighbors = LldpParser().parse_text(ACI_SHOW_LLDP_NEIGHBORS_DETAIL)
        # print(neighbors)
        hosts = []
        hostvars = {}
        for neighbor in neighbors:
            # e.g. {'sysname': 'nw-00-03-30-00', 'addr': '10.254.7.118'}
            name = neighbor.get('sysname', None)
            address = neighbor.get('addr', None)
            if not name or not address:
                continue
            hosts.append(name)
            hostvars[name] = {
                'ansible_host': address
            }
        return {
            GROUP_NAME: {
                'hosts': hosts
            },
            '_meta': {
                'hostvars': hostvars
            },
        }

    def main():
        parser = argparse.ArgumentParser()
        parser.add_argument('--list', action='store_true')
        parser.add_argument('--host', action='store')
        args = parser.parse_args()
        if args.list:
            # --list: dump the full inventory
            print(json.dumps(get_inventory(), indent=2))
            return 0
        if args.host:
            # --host [hostname]: dump the variables of a single host
            hostvar = get_inventory()['_meta']['hostvars'].get(args.host, None)
            if hostvar is None:
                return 1
            print(json.dumps(hostvar, indent=2))
            return 0
        return 0

    sys.exit(main())
| 27.91922
| 87
| 0.729971
| 3,300
| 20,046
| 4.408182
| 0.109394
| 0.039596
| 0.14216
| 0.148622
| 0.823812
| 0.818863
| 0.782292
| 0.703719
| 0.435416
| 0.388809
| 0
| 0.126386
| 0.158485
| 20,046
| 717
| 88
| 27.958159
| 0.735965
| 0.042003
| 0
| 0.432119
| 0
| 0.003311
| 0.840063
| 0.159207
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014901
| false
| 0
| 0.008278
| 0.003311
| 0.041391
| 0.003311
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
be8c89f54961e0083bc99f381a5ad49ae46d3a0f
| 28
|
py
|
Python
|
tests/tests.py
|
kokonut27/owapi.py
|
171722cf75f396577e73c6c6e34e042a7510946d
|
[
"MIT"
] | 2
|
2022-03-14T18:12:55.000Z
|
2022-03-15T00:57:09.000Z
|
tests/tests.py
|
kokonut27/owapi.py
|
171722cf75f396577e73c6c6e34e042a7510946d
|
[
"MIT"
] | null | null | null |
tests/tests.py
|
kokonut27/owapi.py
|
171722cf75f396577e73c6c6e34e042a7510946d
|
[
"MIT"
] | null | null | null |
import owapi

# Smoke test: constructing a Client with no arguments should not raise.
# NOTE(review): the instance is discarded; presumably instantiation alone
# exercises the library's setup path -- confirm against owapi docs.
owapi.Client()
| 9.333333
| 14
| 0.785714
| 4
| 28
| 5.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 3
| 14
| 9.333333
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
be93913529182d596542bccff6ab91a61d88d8a4
| 261
|
py
|
Python
|
dexp/datasets/__init__.py
|
royerloic/dexp
|
1f132d21ac2093725a5a83bc20fa0a45ae09bfe6
|
[
"BSD-3-Clause"
] | null | null | null |
dexp/datasets/__init__.py
|
royerloic/dexp
|
1f132d21ac2093725a5a83bc20fa0a45ae09bfe6
|
[
"BSD-3-Clause"
] | null | null | null |
dexp/datasets/__init__.py
|
royerloic/dexp
|
1f132d21ac2093725a5a83bc20fa0a45ae09bfe6
|
[
"BSD-3-Clause"
] | null | null | null |
from dexp.datasets.base_dataset import BaseDataset
from dexp.datasets.clearcontrol_dataset import CCDataset
from dexp.datasets.joined_dataset import JoinedDataset
from dexp.datasets.tiff_dataset import TIFDataset
from dexp.datasets.zarr_dataset import ZDataset
| 43.5
| 56
| 0.885057
| 35
| 261
| 6.457143
| 0.428571
| 0.176991
| 0.353982
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076628
| 261
| 5
| 57
| 52.2
| 0.937759
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
be9f272fbb8ad4237daf7d8186aa07fed9c5e8af
| 53
|
py
|
Python
|
src/mvg/__init__.py
|
dfrommi/alfred-mvv
|
5310f80ca3e17686fb534db0e53a613043a1b352
|
[
"MIT"
] | 2
|
2019-07-07T19:24:15.000Z
|
2019-10-16T09:07:25.000Z
|
src/mvg/__init__.py
|
dfrommi/alfred-mvv
|
5310f80ca3e17686fb534db0e53a613043a1b352
|
[
"MIT"
] | 1
|
2020-06-05T16:49:17.000Z
|
2020-06-05T16:49:17.000Z
|
src/mvg/__init__.py
|
dfrommi/alfred-mvv
|
5310f80ca3e17686fb534db0e53a613043a1b352
|
[
"MIT"
] | 2
|
2017-04-03T11:47:59.000Z
|
2019-10-16T09:09:26.000Z
|
from .api import MVG
from .favorites import Favorites
| 26.5
| 32
| 0.830189
| 8
| 53
| 5.5
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 53
| 2
| 32
| 26.5
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
bea655014c2b724ce99c64009d7a4d4ab3389225
| 540
|
py
|
Python
|
app/orders/admin.py
|
jcazallasc/burriking-citibox
|
30f0a4520bedb6b9ba613e8cf279b37f0cc60704
|
[
"MIT"
] | null | null | null |
app/orders/admin.py
|
jcazallasc/burriking-citibox
|
30f0a4520bedb6b9ba613e8cf279b37f0cc60704
|
[
"MIT"
] | null | null | null |
app/orders/admin.py
|
jcazallasc/burriking-citibox
|
30f0a4520bedb6b9ba613e8cf279b37f0cc60704
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from orders.infrastructure.persistence.django.offer import Offer
from orders.infrastructure.persistence.django.order import Order
from orders.infrastructure.persistence.django.order_line import OrderLine
from orders.infrastructure.persistence.django.product import Product
from orders.infrastructure.persistence.django.product_option import ProductOption
# Register the order-domain persistence models with the default Django
# admin site so they can be browsed and edited through /admin.
admin.site.register(Offer)
admin.site.register(Order)
admin.site.register(OrderLine)
admin.site.register(Product)
admin.site.register(ProductOption)
| 36
| 81
| 0.864815
| 67
| 540
| 6.940299
| 0.253731
| 0.107527
| 0.258065
| 0.376344
| 0.492473
| 0.404301
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059259
| 540
| 14
| 82
| 38.571429
| 0.915354
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.545455
| 0
| 0.545455
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe4be7fdfd9987b5f18d2cfd4b255ee7765a48bf
| 38
|
py
|
Python
|
experiments/multiwoz_lstm/utils/__init__.py
|
chandar-lab/CriticalGradientOptimization
|
1af4b1df40489991289bb50bb69859a00b2c97c6
|
[
"MIT"
] | 1
|
2021-07-12T03:13:39.000Z
|
2021-07-12T03:13:39.000Z
|
experiments/multiwoz_lstm/utils/__init__.py
|
chandar-lab/CriticalGradientOptimization
|
1af4b1df40489991289bb50bb69859a00b2c97c6
|
[
"MIT"
] | null | null | null |
experiments/multiwoz_lstm/utils/__init__.py
|
chandar-lab/CriticalGradientOptimization
|
1af4b1df40489991289bb50bb69859a00b2c97c6
|
[
"MIT"
] | null | null | null |
from nlp import BLEUScorer, normalize
| 19
| 37
| 0.842105
| 5
| 38
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 1
| 38
| 38
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
fe5c99f7fd494dab658a70b505bc7f1b2947e4c3
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_symfix.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_symfix.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/debugpy/_vendored/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_symfix.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/b9/fe/0e/38f1a7d78dc8044fb235cdf6eac06a38d2e513f9e55cd279bc35c179d7
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.375
| 0
| 96
| 1
| 96
| 96
| 0.520833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fe687c6db3c9be722a95d6ae6cee2900f88bdaca
| 85
|
py
|
Python
|
app/extension/__init__.py
|
yntonfon/dashboard
|
287e7b2d895916102236243c1051da1e5ee3756e
|
[
"MIT"
] | null | null | null |
app/extension/__init__.py
|
yntonfon/dashboard
|
287e7b2d895916102236243c1051da1e5ee3756e
|
[
"MIT"
] | null | null | null |
app/extension/__init__.py
|
yntonfon/dashboard
|
287e7b2d895916102236243c1051da1e5ee3756e
|
[
"MIT"
] | null | null | null |
from .bcrypt import bcrypt
from .mail import mail
from .sqlalchemy import sqlalchemy
| 21.25
| 34
| 0.823529
| 12
| 85
| 5.833333
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141176
| 85
| 3
| 35
| 28.333333
| 0.958904
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe7865369f15eea858032f73db7500572b59aaf9
| 133
|
py
|
Python
|
CuConnect/pims/test.py
|
Shreyans13/Xenial-Xerus
|
6a42886eb6f882f9772689ea40c8e7dab75e678a
|
[
"MIT"
] | null | null | null |
CuConnect/pims/test.py
|
Shreyans13/Xenial-Xerus
|
6a42886eb6f882f9772689ea40c8e7dab75e678a
|
[
"MIT"
] | null | null | null |
CuConnect/pims/test.py
|
Shreyans13/Xenial-Xerus
|
6a42886eb6f882f9772689ea40c8e7dab75e678a
|
[
"MIT"
] | null | null | null |
from .uims_api import SessionUIMS
import os

# Open a UIMS session with credentials taken from the environment.
acc = SessionUIMS(os.getenv("UIMS_UID"), os.getenv("UIMS_PASS"))
# NOTE(review): the result is discarded; presumably accessing the property
# is enough to exercise the session -- confirm against SessionUIMS.
acc.available_sessions
| 22.166667
| 64
| 0.789474
| 20
| 133
| 5.05
| 0.6
| 0.158416
| 0.237624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090226
| 133
| 5
| 65
| 26.6
| 0.834711
| 0
| 0
| 0
| 0
| 0
| 0.12782
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.25
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 5
|
fe890d52af79eeb6b7e392dfa6a2ae0cb6d483bb
| 244
|
py
|
Python
|
settings.py
|
fcoclavero/textpreprocess
|
09f646b74cd36f020c2a45fcca5bca520929cfef
|
[
"MIT"
] | null | null | null |
settings.py
|
fcoclavero/textpreprocess
|
09f646b74cd36f020c2a45fcca5bca520929cfef
|
[
"MIT"
] | 7
|
2019-06-28T07:37:06.000Z
|
2022-02-09T20:08:54.000Z
|
settings.py
|
fcoclavero/textpreprocess
|
09f646b74cd36f020c2a45fcca5bca520929cfef
|
[
"MIT"
] | 1
|
2019-05-31T03:06:13.000Z
|
2019-05-31T03:06:13.000Z
|
import os
EN = {"ALLOWED_PUNCTUATION_MARKS": ".,!?;", "PYSPELL_LANGUAGE": "en"}
ES = {
"ALLOWED_PUNCTUATION_MARKS": ".,!?;",
"PYSPELL_LANGUAGE": "es",
"LEMMAS_PATH": os.path.join(os.path.dirname(__file__), "lemmas", "es.json"),
}
| 24.4
| 80
| 0.622951
| 28
| 244
| 5.035714
| 0.535714
| 0.255319
| 0.326241
| 0.425532
| 0.539007
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139344
| 244
| 9
| 81
| 27.111111
| 0.671429
| 0
| 0
| 0
| 0
| 0
| 0.491803
| 0.204918
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
fe8c935f6efe92ee2dd3370eb687fca0cd2bd27f
| 222
|
py
|
Python
|
brian2/devices/cpp_standalone/__init__.py
|
SimonAltrogge/brian2
|
6463c368a8277041051bf5ae4816f0dd5b6e057c
|
[
"BSD-2-Clause"
] | 674
|
2015-01-14T11:05:39.000Z
|
2022-03-29T04:53:50.000Z
|
brian2/devices/cpp_standalone/__init__.py
|
JongwanKim2090/brian2
|
c212a57cb992b766786b5769ebb830ff12d8a8ad
|
[
"BSD-2-Clause"
] | 937
|
2015-01-05T13:24:22.000Z
|
2022-03-25T13:10:13.000Z
|
brian2/devices/cpp_standalone/__init__.py
|
JongwanKim2090/brian2
|
c212a57cb992b766786b5769ebb830ff12d8a8ad
|
[
"BSD-2-Clause"
] | 237
|
2015-01-05T13:54:16.000Z
|
2022-03-15T22:16:32.000Z
|
"""
Package implementing the C++ "standalone" `Device` and `CodeObject`.
"""
from .codeobject import CPPStandaloneCodeObject
from .device import cpp_standalone_device
from .GSLcodeobject import GSLCPPStandaloneCodeObject
| 27.75
| 68
| 0.815315
| 22
| 222
| 8.136364
| 0.636364
| 0.178771
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.103604
| 222
| 7
| 69
| 31.714286
| 0.899497
| 0.306306
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
fe90aa2fd06fb8713d9aa3b9f64b740414057665
| 5,021
|
py
|
Python
|
perde-tests/tests/test_default.py
|
YushiOMOTE/perde
|
beeb3208ea2d6edcc4df2b5d74834fadd2807fbc
|
[
"MIT"
] | 19
|
2020-10-29T11:38:19.000Z
|
2022-03-13T03:14:21.000Z
|
perde-tests/tests/test_default.py
|
YushiOMOTE/perde
|
beeb3208ea2d6edcc4df2b5d74834fadd2807fbc
|
[
"MIT"
] | 19
|
2020-10-29T08:02:10.000Z
|
2020-12-22T06:25:48.000Z
|
perde-tests/tests/test_default.py
|
YushiOMOTE/perde
|
beeb3208ea2d6edcc4df2b5d74834fadd2807fbc
|
[
"MIT"
] | 1
|
2021-05-06T07:38:20.000Z
|
2021-05-06T07:38:20.000Z
|
from dataclasses import dataclass, field
import perde
import pytest
from util import FORMATS
"""rust
#[derive(Serialize, Debug, new)]
struct DefaultConstruct {
a: String,
c: u64,
}
add!(DefaultConstruct {"xxx".into(), 3});
"""
@pytest.mark.parametrize("m", FORMATS)
def test_default(m):
@perde.attr(default=True)
@dataclass
class DefaultConstruct:
a: str
b: str
c: int
p = m.unpack_data("DefaultConstruct", astype=DefaultConstruct)
assert p == DefaultConstruct("xxx", "", 3)
@pytest.mark.parametrize("m", FORMATS)
def test_field_default(m):
@dataclass
class DefaultConstruct2:
a: str
b: str = field(metadata={"perde_default": True})
c: int
p = m.unpack_data("DefaultConstruct", astype=DefaultConstruct2)
assert p == DefaultConstruct2("xxx", "", 3)
@pytest.mark.parametrize("m", FORMATS)
def test_field_default_value(m):
@dataclass
class DefaultConstruct3:
a: str
b: str = field(default="hage")
c: int = field(default=99)
p = m.unpack_data("DefaultConstruct", astype=DefaultConstruct3)
assert p == DefaultConstruct3("xxx", "hage", 3)
@pytest.mark.parametrize("m", FORMATS)
def test_field_default_factory(m):
@dataclass
class DefaultConstruct4:
a: str
b: str = field(default_factory=lambda: "hage")
c: int = field(default=99)
p = m.unpack_data("DefaultConstruct", astype=DefaultConstruct4)
assert p == DefaultConstruct4("xxx", "hage", 3)
"""rust
#[derive(Serialize, Debug, new)]
struct Skip {
x: String,
y: u64,
z: f64,
a: String,
b: String,
}
#[derive(Serialize, Debug, new)]
struct Skipped {
x: String,
#[serde(skip)]
y: u64,
z: f64,
a: String,
b: String,
}
#[derive(Serialize, Debug, new)]
struct SkipDefault {
x: String,
y: u64,
z: f64,
a: String,
b: String,
}
add!(Skip {"ssssss".into(), 3, 1.1, "a".into(), "b".into()});
add!(Skipped {"ssssss".into(), 3, 1.1, "a".into(), "b".into()});
add!(SkipDefault {"ssssss".into(), 0, 1.1, "a".into(), "b".into()});
"""
@pytest.mark.parametrize("m", FORMATS)
def test_skip_with_default(m):
@perde.attr(default=True)
@dataclass
class Skip:
x: str
y: int = field(metadata={"perde_skip": True})
z: float
a: str
b: str
p = m.unpack_data("Skip", astype=Skip)
q = m.unpack_data("Skipped", astype=Skip)
assert p == q
assert p == Skip("ssssss", 0, 1.1, "a", "b")
assert m.dumps(p) == m.data("Skipped")
@pytest.mark.parametrize("m", FORMATS)
def test_field_skip_with_default(m):
@dataclass
class Skip:
x: str
y: int = field(metadata={"perde_skip": True, "perde_default": True})
z: float
a: str
b: str
p = m.unpack_data("Skip", astype=Skip)
q = m.unpack_data("Skipped", astype=Skip)
assert p == q
assert p == Skip("ssssss", 0, 1.1, "a", "b")
assert m.dumps(p) == m.data("Skipped")
@pytest.mark.parametrize("m", FORMATS)
def test_field_skip_with_default_value(m):
@dataclass
class Skip:
x: str
y: int = field(default=4, metadata={"perde_skip": True})
z: float = field(default=1.3)
a: str = field(default_factory=lambda: "aaxx")
b: str = field(default_factory=lambda: "bbcc")
p = m.unpack_data("Skip", astype=Skip)
q = m.unpack_data("Skipped", astype=Skip)
assert p == q
assert p == Skip("ssssss", 4, 1.1, "a", "b")
assert m.dumps(p) == m.data("Skipped")
@pytest.mark.parametrize("m", FORMATS)
def test_field_skip_with_default_factory(m):
@dataclass
class Skip:
x: str
y: int = field(default_factory=lambda: 400, metadata={"perde_skip": True})
z: float = field(default=1.3)
a: str = field(default_factory=lambda: "aaxx")
b: str = field(default_factory=lambda: "bbcc")
p = m.unpack_data("Skip", astype=Skip)
q = m.unpack_data("Skipped", astype=Skip)
assert p == q
assert p == Skip("ssssss", 400, 1.1, "a", "b")
assert m.dumps(p) == m.data("Skipped")
@pytest.mark.parametrize("m", FORMATS)
def test_field_skip_serializing(m):
@dataclass
class Skip:
x: str
y: int = field(metadata={"perde_skip_serializing": True})
z: float
a: str
b: str
p = m.unpack_data("Skip", astype=Skip)
with pytest.raises(Exception):
m.unpack_data("Skipped", astype=Skip)
assert p == Skip("ssssss", 3, 1.1, "a", "b")
assert m.dumps(p) == m.data("Skipped")
@pytest.mark.parametrize("m", FORMATS)
def test_skip_deserializing_with_default(m):
@perde.attr(default=True)
@dataclass
class Skip:
x: str
y: int = field(metadata={"perde_skip_deserializing": True})
z: float
a: str
b: str
p = m.unpack_data("Skip", astype=Skip)
q = m.unpack_data("Skipped", astype=Skip)
assert p == q
assert p == Skip("ssssss", 0, 1.1, "a", "b")
assert m.dumps(p) == m.data("SkipDefault")
| 24.73399
| 82
| 0.60705
| 688
| 5,021
| 4.335756
| 0.106105
| 0.010727
| 0.059001
| 0.073751
| 0.790479
| 0.768019
| 0.724103
| 0.724103
| 0.645659
| 0.645659
| 0
| 0.017806
| 0.228241
| 5,021
| 202
| 83
| 24.856436
| 0.752
| 0
| 0
| 0.677165
| 0
| 0
| 0.090236
| 0.010643
| 0
| 0
| 0
| 0
| 0.165354
| 1
| 0.07874
| false
| 0
| 0.031496
| 0
| 0.519685
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
feaad1d55945578995753646c07646057c435902
| 605
|
py
|
Python
|
chat/__init__.py
|
yuru-yuri/websocket-chat
|
8f11d950be08b44e490c18bbcf0f49fdaaf643a9
|
[
"MIT"
] | null | null | null |
chat/__init__.py
|
yuru-yuri/websocket-chat
|
8f11d950be08b44e490c18bbcf0f49fdaaf643a9
|
[
"MIT"
] | 1
|
2018-06-21T17:13:11.000Z
|
2019-06-24T11:11:17.000Z
|
chat/__init__.py
|
yuru-yuri/websocket-chat
|
8f11d950be08b44e490c18bbcf0f49fdaaf643a9
|
[
"MIT"
] | null | null | null |
from . import web_socket
from . import http
from .db.user import User
from .db.message import Message
from .utils import root_path
import os
import tornado.httpserver
import tornado.web
import tornado.ioloop
def main():
    """Construct the Tornado HTTP server for the chat application.

    Routes: the websocket endpoint, a per-user HTTP endpoint, and a
    static-file fallback serving from the package's 'static' directory
    with index.html as the default document.

    NOTE(review): the HTTPServer is built but never bound to a port and no
    IOLoop is started here, and the instance is discarded -- presumably
    the caller (or a missing listen()/start() call) handles that; confirm.
    """
    # web_socket.make_route() # TODO
    tornado.httpserver.HTTPServer(tornado.web.Application((
        (r'/ws/', web_socket.ChatWebSocket),
        (r'/user/(\w+)/?', http.Http),
        (r'/.*', tornado.web.StaticFileHandler, {
            'path': os.path.join(root_path(), 'static'),
            'default_filename': 'index.html'
        })
    )))
    # user = User()
    pass
| 24.2
| 59
| 0.633058
| 74
| 605
| 5.081081
| 0.445946
| 0.071809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.221488
| 605
| 24
| 60
| 25.208333
| 0.798301
| 0.072727
| 0
| 0
| 0
| 0
| 0.100539
| 0
| 0
| 0
| 0
| 0.041667
| 0
| 1
| 0.052632
| true
| 0.052632
| 0.473684
| 0
| 0.526316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
229c55029dd98e03b32b8e9e0c08113944b530f7
| 121
|
py
|
Python
|
config/__init__.py
|
zhouhongf/loader_database
|
85e3d606d0bd2316eb5ae2c645766b3e5f4a7832
|
[
"MIT"
] | 1
|
2021-11-27T06:40:43.000Z
|
2021-11-27T06:40:43.000Z
|
config/__init__.py
|
zhouhongf/loader_database
|
85e3d606d0bd2316eb5ae2c645766b3e5f4a7832
|
[
"MIT"
] | null | null | null |
config/__init__.py
|
zhouhongf/loader_database
|
85e3d606d0bd2316eb5ae2c645766b3e5f4a7832
|
[
"MIT"
] | null | null | null |
from .log import Logger
from .config import Config
from .decorators import singleton
from .bank_dict import BankDict
| 24.2
| 34
| 0.801653
| 17
| 121
| 5.647059
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.165289
| 121
| 4
| 35
| 30.25
| 0.950495
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
229fbdb9940ddaeec128091eb36a991488cf0500
| 211
|
py
|
Python
|
data/syn_project/mutations/annotation_data_input.py
|
ki-tools/sls_ki_synapse
|
8c726a9ec568e3d416049a8813c21bbe87740f16
|
[
"Apache-2.0"
] | 1
|
2018-11-21T19:54:34.000Z
|
2018-11-21T19:54:34.000Z
|
data/syn_project/mutations/annotation_data_input.py
|
pcstout/sls_ki_synapse
|
8c726a9ec568e3d416049a8813c21bbe87740f16
|
[
"Apache-2.0"
] | 5
|
2019-03-12T16:44:35.000Z
|
2019-03-15T21:46:00.000Z
|
data/syn_project/mutations/annotation_data_input.py
|
ki-tools/sls_ki_synapse
|
8c726a9ec568e3d416049a8813c21bbe87740f16
|
[
"Apache-2.0"
] | 2
|
2019-02-28T23:16:32.000Z
|
2019-03-05T22:16:39.000Z
|
import graphene
class AnnotationDataInput(graphene.InputObjectType):
    """
    Input class for 'annotations' data.

    Represents a single key/value annotation pair; both fields are
    required by the GraphQL schema.
    """
    # Annotation key (required string).
    key = graphene.String(required=True)
    # Annotation value (required string).
    value = graphene.String(required=True)
| 21.1
| 52
| 0.706161
| 21
| 211
| 7.095238
| 0.666667
| 0.187919
| 0.295302
| 0.348993
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184834
| 211
| 9
| 53
| 23.444444
| 0.866279
| 0.165877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
22a59fe60ee41c06f25f4bd200dec9c6fd27a57b
| 58
|
py
|
Python
|
shapenet/utils/__init__.py
|
mierzejk/shapenet
|
fe7a6caf726b34e3bce7d696b846b8df446ba998
|
[
"BSD-2-Clause"
] | null | null | null |
shapenet/utils/__init__.py
|
mierzejk/shapenet
|
fe7a6caf726b34e3bce7d696b846b8df446ba998
|
[
"BSD-2-Clause"
] | null | null | null |
shapenet/utils/__init__.py
|
mierzejk/shapenet
|
fe7a6caf726b34e3bce7d696b846b8df446ba998
|
[
"BSD-2-Clause"
] | 1
|
2020-09-25T08:55:12.000Z
|
2020-09-25T08:55:12.000Z
|
from .load_config_file import Config
from .misc import now
| 29
| 36
| 0.844828
| 10
| 58
| 4.7
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12069
| 58
| 2
| 37
| 29
| 0.921569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
22ba6b39944a9cf6747c3f20dd8af50cf1f24c2d
| 516
|
py
|
Python
|
sifter/grammar/__init__.py
|
tomduckering/sifter
|
cb2656ac47125e9e06c9bdda56193da41cc340a8
|
[
"BSD-2-Clause"
] | 10
|
2016-11-25T08:38:28.000Z
|
2020-09-29T06:45:47.000Z
|
sifter/grammar/__init__.py
|
tomduckering/sifter
|
cb2656ac47125e9e06c9bdda56193da41cc340a8
|
[
"BSD-2-Clause"
] | 3
|
2020-08-24T16:17:30.000Z
|
2020-10-09T08:26:14.000Z
|
sifter/grammar/__init__.py
|
python-sifter/sifter
|
cb2656ac47125e9e06c9bdda56193da41cc340a8
|
[
"BSD-2-Clause"
] | 6
|
2015-11-06T00:46:18.000Z
|
2020-10-07T16:27:41.000Z
|
# modules with no dependencies on other modules in sifter.grammar
from sifter.grammar.actions import *
from sifter.grammar.comparator import *
from sifter.grammar.string import String
from sifter.grammar.tag import *
from sifter.grammar.validator import *
# modules that only depend on a module above
from sifter.grammar.rule import *
from sifter.grammar.state import *
# the rest in dependency order
from sifter.grammar.command_list import *
from sifter.grammar.command import *
from sifter.grammar.test import *
| 32.25
| 65
| 0.806202
| 75
| 516
| 5.533333
| 0.413333
| 0.344578
| 0.409639
| 0.33253
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.129845
| 516
| 15
| 66
| 34.4
| 0.924276
| 0.261628
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
22e190ff4ed74c87c8133fc8869d63ee7cdcec3f
| 158
|
py
|
Python
|
sslib/__init__.py
|
patrickmacarthur/ssrando
|
39e8fbc43433b063641474f037d9b3be134e1522
|
[
"MIT"
] | null | null | null |
sslib/__init__.py
|
patrickmacarthur/ssrando
|
39e8fbc43433b063641474f037d9b3be134e1522
|
[
"MIT"
] | null | null | null |
sslib/__init__.py
|
patrickmacarthur/ssrando
|
39e8fbc43433b063641474f037d9b3be134e1522
|
[
"MIT"
] | null | null | null |
from .u8file import U8File
from .bzs import buildBzs, parseBzs
from .msb import parseMSB, buildMSB
from .patch import Patcher
from .allpatch import AllPatcher
| 31.6
| 35
| 0.822785
| 22
| 158
| 5.909091
| 0.590909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014599
| 0.132911
| 158
| 5
| 36
| 31.6
| 0.934307
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
22e192831654bf6af9883c61cfd7c55d42fcbf3c
| 93,699
|
py
|
Python
|
mmtbx/geometry_restraints/torsion_restraints/tst_reference_model.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
mmtbx/geometry_restraints/torsion_restraints/tst_reference_model.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
mmtbx/geometry_restraints/torsion_restraints/tst_reference_model.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
from __future__ import division
from mmtbx import monomer_library
from mmtbx.geometry_restraints.torsion_restraints.reference_model import \
reference_model, reference_model_params
from mmtbx.geometry_restraints.torsion_restraints import utils
from mmtbx.validation.rotalyze import rotalyze
import mmtbx.model
from cctbx.array_family import flex
import iotbx.phil
import iotbx.pdb
from libtbx.test_utils import show_diff
import libtbx.load_env
import cStringIO
import sys, os, time
model_raw_records = """\
CRYST1 41.566 72.307 92.870 108.51 93.02 90.06 P 1 4
ATOM 5466 N ASN C 236 17.899 72.943 29.028 1.00 60.13 N
ATOM 5467 CA ASN C 236 16.519 72.435 29.114 1.00 60.52 C
ATOM 5468 C ASN C 236 16.377 70.925 29.327 1.00 60.49 C
ATOM 5469 O ASN C 236 15.429 70.294 28.863 1.00 60.60 O
ATOM 5470 CB ASN C 236 15.689 72.896 27.916 1.00 60.55 C
ATOM 5471 CG ASN C 236 14.357 73.447 28.338 1.00 61.75 C
ATOM 5472 OD1 ASN C 236 14.256 74.609 28.768 1.00 62.86 O
ATOM 5473 ND2 ASN C 236 13.319 72.616 28.247 1.00 61.22 N
ATOM 5474 N LEU C 237 17.316 70.364 30.068 1.00 60.55 N
ATOM 5475 CA LEU C 237 17.444 68.931 30.166 1.00 60.48 C
ATOM 5476 C LEU C 237 17.815 68.555 31.581 1.00 60.06 C
ATOM 5477 O LEU C 237 17.335 67.547 32.097 1.00 60.41 O
ATOM 5478 CB LEU C 237 18.518 68.464 29.178 1.00 60.91 C
ATOM 5479 CG LEU C 237 18.542 67.095 28.491 1.00 62.25 C
ATOM 5480 CD1 LEU C 237 17.407 66.153 28.923 1.00 63.18 C
ATOM 5481 CD2 LEU C 237 18.563 67.309 26.965 1.00 62.89 C
"""
reference_raw_records = """\
CRYST1 40.688 71.918 93.213 108.16 93.25 90.40 P 1 4
ATOM 5485 N ASN C 236 16.417 72.834 29.095 1.00 7.17 N
ATOM 5486 CA ASN C 236 15.051 72.312 29.173 1.00 7.74 C
ATOM 5487 C ASN C 236 15.000 70.818 29.431 1.00 7.38 C
ATOM 5488 O ASN C 236 14.047 70.141 29.024 1.00 7.80 O
ATOM 5489 CB ASN C 236 14.281 72.645 27.887 1.00 8.78 C
ATOM 5490 CG ASN C 236 12.769 72.657 28.088 1.00 13.44 C
ATOM 5491 OD1 ASN C 236 12.265 73.196 29.082 1.00 20.19 O
ATOM 5492 ND2 ASN C 236 12.032 72.114 27.109 1.00 16.07 N
ATOM 5493 N LEU C 237 16.010 70.282 30.134 1.00 6.60 N
ATOM 5494 CA LEU C 237 16.122 68.825 30.270 1.00 7.41 C
ATOM 5495 C LEU C 237 16.481 68.430 31.697 1.00 6.01 C
ATOM 5496 O LEU C 237 15.944 67.448 32.224 1.00 6.47 O
ATOM 5497 CB LEU C 237 17.151 68.239 29.297 1.00 8.10 C
ATOM 5498 CG LEU C 237 17.384 66.726 29.347 1.00 10.94 C
ATOM 5499 CD1 LEU C 237 16.055 65.956 29.107 1.00 13.10 C
ATOM 5500 CD2 LEU C 237 18.455 66.271 28.343 1.00 11.63 C
"""
reference_raw_records_alt_seq = """\
CRYST1 40.688 71.918 93.213 108.16 93.25 90.40 P 1 4
ATOM 5485 N ASN B 246 16.417 72.834 29.095 1.00 7.17 N
ATOM 5486 CA ASN B 246 15.051 72.312 29.173 1.00 7.74 C
ATOM 5487 C ASN B 246 15.000 70.818 29.431 1.00 7.38 C
ATOM 5488 O ASN B 246 14.047 70.141 29.024 1.00 7.80 O
ATOM 5489 CB ASN B 246 14.281 72.645 27.887 1.00 8.78 C
ATOM 5490 CG ASN B 246 12.769 72.657 28.088 1.00 13.44 C
ATOM 5491 OD1 ASN B 246 12.265 73.196 29.082 1.00 20.19 O
ATOM 5492 ND2 ASN B 246 12.032 72.114 27.109 1.00 16.07 N
ATOM 5493 N LEU B 247 16.010 70.282 30.134 1.00 6.60 N
ATOM 5494 CA LEU B 247 16.122 68.825 30.270 1.00 7.41 C
ATOM 5495 C LEU B 247 16.481 68.430 31.697 1.00 6.01 C
ATOM 5496 O LEU B 247 15.944 67.448 32.224 1.00 6.47 O
ATOM 5497 CB LEU B 247 17.151 68.239 29.297 1.00 8.10 C
ATOM 5498 CG LEU B 247 17.384 66.726 29.347 1.00 10.94 C
ATOM 5499 CD1 LEU B 247 16.055 65.956 29.107 1.00 13.10 C
ATOM 5500 CD2 LEU B 247 18.455 66.271 28.343 1.00 11.63 C
"""
reference_raw_records_match = """\
CRYST1 40.688 71.918 93.213 108.16 93.25 90.40 P 1 4
ATOM 5485 N ASN C 270 16.417 72.834 29.095 1.00 7.17 N
ATOM 5486 CA ASN C 270 15.051 72.312 29.173 1.00 7.74 C
ATOM 5487 C ASN C 270 15.000 70.818 29.431 1.00 7.38 C
ATOM 5488 O ASN C 270 14.047 70.141 29.024 1.00 7.80 O
ATOM 5489 CB ASN C 270 14.281 72.645 27.887 1.00 8.78 C
ATOM 5490 CG ASN C 270 12.769 72.657 28.088 1.00 13.44 C
ATOM 5491 OD1 ASN C 270 12.265 73.196 29.082 1.00 20.19 O
ATOM 5492 ND2 ASN C 270 12.032 72.114 27.109 1.00 16.07 N
ATOM 5493 N ALA C 271 16.010 70.282 30.134 1.00 6.60 N
ATOM 5494 CA ALA C 271 16.122 68.825 30.270 1.00 7.41 C
ATOM 5495 C ALA C 271 16.481 68.430 31.697 1.00 6.01 C
ATOM 5496 O ALA C 271 15.944 67.448 32.224 1.00 6.47 O
ATOM 5497 CB ALA C 271 17.151 68.239 29.297 1.00 8.10 C
"""
def exercise_reference_model(args, mon_lib_srv, ener_lib):
log = cStringIO.StringIO()
work_params = reference_model_params.extract()
work_params.reference_model.enabled = True
work_params.reference_model.fix_outliers = False
model = mmtbx.model.manager(
model_input = iotbx.pdb.input(lines=flex.split_lines(model_raw_records),
source_info=None),
process_input=True)
pdb_h = model.get_hierarchy()
reference_hierarchy_list = []
tmp_hierarchy = iotbx.pdb.input(
source_info=None,
lines=flex.split_lines(reference_raw_records)).construct_hierarchy()
reference_hierarchy_list.append(tmp_hierarchy)
rm = reference_model(
model=model,
reference_hierarchy_list=reference_hierarchy_list,
params=work_params.reference_model,
log=log)
assert rm.get_n_proxies() == 5, "Got %d, expected 5" % rm.get_n_proxies()
reference_hierarchy_list_alt_seq = []
tmp_hierarchy = iotbx.pdb.input(
source_info=None,
lines=flex.split_lines(reference_raw_records_alt_seq)).\
construct_hierarchy()
reference_hierarchy_list_alt_seq.append(tmp_hierarchy)
reference_hierarchy_list_ref_match = []
tmp_hierarchy = iotbx.pdb.input(
source_info=None,
lines=flex.split_lines(reference_raw_records_match)).\
construct_hierarchy()
reference_hierarchy_list_ref_match.append(tmp_hierarchy)
i_seq_name_hash = utils.build_name_hash(
pdb_hierarchy=pdb_h)
assert i_seq_name_hash == \
{0: ' N ASN C 236 ', 1: ' CA ASN C 236 ',
2: ' C ASN C 236 ', 3: ' O ASN C 236 ',
4: ' CB ASN C 236 ', 5: ' CG ASN C 236 ',
6: ' OD1 ASN C 236 ', 7: ' ND2 ASN C 236 ',
8: ' N LEU C 237 ', 9: ' CA LEU C 237 ',
10: ' C LEU C 237 ', 11: ' O LEU C 237 ',
12: ' CB LEU C 237 ', 13: ' CG LEU C 237 ',
14: ' CD1 LEU C 237 ', 15: ' CD2 LEU C 237 '}
i_seq_element_hash = utils.build_element_hash(
pdb_hierarchy=pdb_h)
assert i_seq_element_hash == \
{0: 'N', 1: 'C', 2: 'C', 3: 'O', 4: 'C', 5: 'C', 6: 'O', 7: 'N', 8: 'N',
9: 'C', 10: 'C', 11: 'O', 12: 'C', 13: 'C', 14: 'C', 15: 'C'}
ref_pdb_hierarchy = reference_hierarchy_list[0]
dihedral_proxies = \
utils.get_complete_dihedral_proxies(pdb_hierarchy=ref_pdb_hierarchy)
sites_cart_ref = ref_pdb_hierarchy.atoms().extract_xyz()
dihedral_hash = rm.build_dihedral_hash(
dihedral_proxies=dihedral_proxies,
sites_cart=sites_cart_ref,
pdb_hierarchy=ref_pdb_hierarchy,
include_hydrogens=False,
include_main_chain=True,
include_side_chain=True)
assert len(dihedral_hash) == 5
reference_dihedral_proxies = rm.reference_dihedral_proxies.deep_copy()
assert reference_dihedral_proxies is not None
assert len(reference_dihedral_proxies) == len(dihedral_hash)
for rdp in reference_dihedral_proxies:
assert rdp.limit == work_params.reference_model.limit
r1 = rotalyze(pdb_hierarchy=pdb_h, outliers_only=False)
out1 = cStringIO.StringIO()
r1.show_old_output(out=out1)
r2 = rotalyze(pdb_hierarchy=ref_pdb_hierarchy, outliers_only=False)
out2 = cStringIO.StringIO()
r2.show_old_output(out=out2)
assert not show_diff(out1.getvalue(), """\
C 236 ASN:1.00:0.2:227.3:80.2:::OUTLIER:OUTLIER
C 237 LEU:1.00:0.0:209.6:357.2:::OUTLIER:OUTLIER
""")
assert not show_diff(out2.getvalue(), """\
C 236 ASN:1.00:39.1:203.2:43.6:::Favored:t0
C 237 LEU:1.00:60.8:179.1:57.3:::Favored:tp
""")
xray_structure = pdb_h.extract_xray_structure()
rm.set_rotamer_to_reference(
xray_structure=xray_structure,
mon_lib_srv=mon_lib_srv,
quiet=True)
pdb_h.adopt_xray_structure(xray_structure)
r2 = rotalyze(pdb_hierarchy=pdb_h, outliers_only=False)
out3 = cStringIO.StringIO()
r2.show_old_output(out=out3)
assert not show_diff(out3.getvalue(), """\
C 236 ASN:1.00:39.1:203.2:43.6:::Favored:t0
C 237 LEU:1.00:60.8:179.1:57.3:::Favored:tp
""")
match_map = rm.match_map['ref0']
assert match_map == \
{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, 10: 10, 11: 11,
12: 12, 13: 13, 14: 14, 15: 15}
master_phil_str_overrides = """
reference_model.reference_group {
reference= chain B and resseq 246:247
selection= chain C and resid 236:237
}
"""
def_pars = reference_model_params
pars = iotbx.phil.parse(master_phil_str_overrides)
all_pars = def_pars.fetch(pars).extract()
all_pars.reference_model.enabled = True
rm = reference_model(
model = model,
reference_hierarchy_list=reference_hierarchy_list_alt_seq,
params=all_pars.reference_model,
log=log)
match_map = rm.match_map
assert match_map['ref0'] == \
{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, 10: 10, 11: 11,
12: 12, 13: 13, 14: 14, 15: 15}
pdb_file = libtbx.env.find_in_repositories(
relative_path="phenix_regression/pdb/1ywf.pdb",
test=os.path.isfile)
model = mmtbx.model.manager(
model_input = iotbx.pdb.input(file_name=pdb_file,
source_info=None),
process_input=True)
pdb_h = model.get_hierarchy()
# pdb_hierarchy = iotbx.pdb.input(file_name=pdb_file).construct_hierarchy()
reference_file_list = []
reference_file_list.append(pdb_file)
work_pars = reference_model_params.extract()
work_pars.reference_model.fix_outliers = False
work_pars.reference_model.enabled = True
rm = reference_model(
model=model,
reference_file_list=reference_file_list,
params=work_pars.reference_model,
log=log)
reference_dihedral_proxies = rm.reference_dihedral_proxies
standard_weight = 0
for dp in reference_dihedral_proxies:
if dp.weight == 1.0:
standard_weight += 1
assert standard_weight == 1181, "Expecting 1181, got %d" % standard_weight
if (not libtbx.env.has_module(name="ksdssp")):
print "Skipping KSDSSP tests: ksdssp module not available."
else:
work_pars = reference_model_params.extract()
work_pars.reference_model.secondary_structure_only = True
work_pars.reference_model.enabled = True
rm.params = work_pars.reference_model
rm.get_reference_dihedral_proxies(model=model)
reference_dihedral_proxies = rm.reference_dihedral_proxies
ss_weight = 0
for dp in reference_dihedral_proxies:
if dp.weight == 1.0:
ss_weight += 1
assert ss_weight == 694, "expecting 694 proxies, got %d" % ss_weight
#test SSM alignment
pdb_file = libtbx.env.find_in_repositories(
relative_path="phenix_regression/ncs/rnase-s.pdb",
test=os.path.isfile)
pdb_hierarchy = iotbx.pdb.input(file_name=pdb_file).construct_hierarchy()
reference_file_list = []
reference_file_list.append(pdb_file)
pdb_hierarchy.reset_i_seq_if_necessary()
import ccp4io_adaptbx
ssm = ccp4io_adaptbx.SecondaryStructureMatching(
reference=pdb_hierarchy.models()[0].chains()[0],
moving=pdb_hierarchy.models()[0].chains()[1])
alignment = ccp4io_adaptbx.SSMAlignment.residue_groups(match=ssm)
assert ssm.GetQvalues()[0] > 0.98
def exercise_multiple_to_one(args, mon_lib_srv, ener_lib):
pdb_str_original = """\
CRYST1 69.211 49.956 52.557 90.00 90.00 90.00 P 1
ATOM 1 N THR A 3 51.193 44.956 23.993 1.00 80.52 N
ATOM 2 CA THR A 3 50.812 43.732 23.211 1.00 80.52 C
ATOM 4 CB THR A 3 50.446 42.559 24.181 1.00 79.62 C
ATOM 6 OG1 THR A 3 50.206 41.358 23.433 1.00 79.62 O
ATOM 8 CG2 THR A 3 49.239 42.888 25.066 1.00 79.62 C
ATOM 12 C THR A 3 49.657 44.014 22.221 1.00 80.52 C
ATOM 13 O THR A 3 48.520 44.223 22.631 1.00 80.52 O
ATOM 17 N GLY A 4 49.963 44.013 20.917 1.00 79.31 N
ATOM 18 CA GLY A 4 49.030 44.458 19.892 1.00 79.31 C
ATOM 21 C GLY A 4 48.761 43.480 18.761 1.00 79.31 C
ATOM 22 O GLY A 4 47.790 42.725 18.808 1.00 79.31 O
ATOM 24 N ALA A 5 49.581 43.499 17.715 1.00 78.81 N
ATOM 25 CA ALA A 5 49.395 42.604 16.581 1.00 78.81 C
ATOM 27 CB ALA A 5 49.774 43.314 15.283 1.00 77.40 C
ATOM 31 C ALA A 5 50.195 41.315 16.714 1.00 78.81 C
ATOM 32 O ALA A 5 50.258 40.537 15.757 1.00 78.81 O
ATOM 34 N GLN A 6 50.816 41.073 17.872 1.00 80.55 N
ATOM 35 CA GLN A 6 51.642 39.880 18.018 1.00 80.55 C
ATOM 37 CB GLN A 6 52.383 39.879 19.354 1.00 79.84 C
ATOM 40 CG GLN A 6 53.264 41.072 19.596 1.00 79.84 C
ATOM 43 CD GLN A 6 52.490 42.211 20.225 1.00 79.84 C
ATOM 44 OE1 GLN A 6 51.290 42.091 20.489 1.00 79.84 O
ATOM 45 NE2 GLN A 6 53.167 43.325 20.468 1.00 79.84 N
ATOM 48 C GLN A 6 50.788 38.631 17.945 1.00 80.55 C
ATOM 49 O GLN A 6 51.148 37.659 17.273 1.00 80.55 O
ATOM 51 N VAL A 7 49.643 38.651 18.631 1.00 79.06 N
ATOM 52 CA VAL A 7 48.822 37.460 18.795 1.00 79.06 C
ATOM 54 CB VAL A 7 47.610 37.794 19.688 1.00 78.99 C
ATOM 56 CG1 VAL A 7 46.649 36.606 19.794 1.00 78.99 C
ATOM 60 CG2 VAL A 7 48.075 38.245 21.063 1.00 78.99 C
ATOM 64 C VAL A 7 48.399 36.907 17.450 1.00 79.06 C
ATOM 65 O VAL A 7 47.962 35.755 17.360 1.00 79.06 O
ATOM 67 N TYR A 8 48.538 37.700 16.390 1.00 79.78 N
ATOM 68 CA TYR A 8 48.445 37.147 15.051 1.00 79.78 C
ATOM 70 CB TYR A 8 48.732 38.228 14.014 1.00 77.69 C
ATOM 73 CG TYR A 8 48.634 37.736 12.583 1.00 77.69 C
ATOM 74 CD1 TYR A 8 47.404 37.638 11.944 1.00 77.69 C
ATOM 76 CE1 TYR A 8 47.308 37.187 10.640 1.00 77.69 C
ATOM 78 CZ TYR A 8 48.444 36.802 9.966 1.00 77.69 C
ATOM 79 OH TYR A 8 48.355 36.348 8.672 1.00 77.69 O
ATOM 81 CE2 TYR A 8 49.672 36.872 10.580 1.00 77.69 C
ATOM 83 CD2 TYR A 8 49.763 37.333 11.883 1.00 77.69 C
ATOM 85 C TYR A 8 49.416 35.991 14.857 1.00 79.78 C
ATOM 86 O TYR A 8 49.202 35.164 13.967 1.00 79.78 O
ATOM 88 N ALA A 9 50.475 35.912 15.671 1.00 79.03 N
ATOM 89 CA ALA A 9 51.463 34.844 15.546 1.00 79.02 C
ATOM 91 CB ALA A 9 52.444 34.896 16.719 1.00 79.18 C
ATOM 95 C ALA A 9 50.833 33.459 15.484 1.00 79.02 C
ATOM 96 O ALA A 9 51.470 32.524 14.982 1.00 79.02 O
ATOM 98 N ASN A 10 49.611 33.298 16.002 1.00 79.63 N
ATOM 99 CA ASN A 10 48.890 32.036 15.896 1.00 79.63 C
ATOM 101 CB ASN A 10 47.838 31.938 17.002 1.00 78.91 C
ATOM 104 CG ASN A 10 48.455 31.885 18.387 1.00 78.91 C
ATOM 105 OD1 ASN A 10 49.636 31.603 18.527 1.00 78.91 O
ATOM 106 ND2 ASN A 10 47.648 32.113 19.418 1.00 78.91 N
ATOM 109 C ASN A 10 48.213 31.859 14.543 1.00 79.63 C
ATOM 110 O ASN A 10 47.724 30.767 14.246 1.00 79.63 O
TER 58 ASN A 10
ATOM 1990 N THR B 3 21.107 5.000 45.226 1.00 82.71 N
ATOM 1991 CA THR B 3 21.187 6.487 45.312 1.00 82.71 C
ATOM 1993 CB THR B 3 20.105 7.035 46.286 1.00 80.11 C
ATOM 1995 OG1 THR B 3 20.201 6.377 47.557 1.00 80.11 O
ATOM 1997 CG2 THR B 3 18.701 6.831 45.702 1.00 80.11 C
ATOM 2001 C THR B 3 22.604 6.951 45.721 1.00 82.71 C
ATOM 2002 O THR B 3 23.561 6.189 45.599 1.00 82.71 O
ATOM 2006 N GLY B 4 22.752 8.203 46.153 1.00 80.69 N
ATOM 2007 CA GLY B 4 24.064 8.716 46.532 1.00 80.69 C
ATOM 2010 C GLY B 4 25.028 8.902 45.376 1.00 80.69 C
ATOM 2011 O GLY B 4 26.250 8.861 45.572 1.00 80.69 O
ATOM 2013 N ALA B 5 24.503 9.142 44.177 1.00 80.08 N
ATOM 2014 CA ALA B 5 25.268 9.118 42.937 1.00 80.09 C
ATOM 2016 CB ALA B 5 26.031 7.798 42.787 1.00 77.84 C
ATOM 2020 C ALA B 5 24.301 9.316 41.777 1.00 80.09 C
ATOM 2021 O ALA B 5 24.660 9.874 40.734 1.00 80.09 O
ATOM 2023 N GLN B 6 23.035 8.849 42.004 1.00 81.52 N
ATOM 2024 CA GLN B 6 21.978 8.970 41.003 1.00 81.53 C
ATOM 2026 CB GLN B 6 20.722 8.250 41.506 1.00 84.25 C
ATOM 2029 CG GLN B 6 19.920 9.000 42.596 1.00 84.24 C
ATOM 2032 CD GLN B 6 20.032 10.516 42.500 1.00 84.25 C
ATOM 2033 OE1 GLN B 6 19.770 11.098 41.447 1.00 84.24 O
ATOM 2034 NE2 GLN B 6 20.441 11.159 43.593 1.00 84.25 N
ATOM 2037 C GLN B 6 21.660 10.426 40.679 1.00 81.52 C
ATOM 2038 O GLN B 6 21.344 10.750 39.530 1.00 81.52 O
ATOM 2040 N VAL B 7 21.740 11.307 41.646 1.00 80.27 N
ATOM 2041 CA VAL B 7 21.376 12.702 41.416 1.00 80.28 C
ATOM 2043 CB VAL B 7 21.371 13.503 42.738 1.00 79.22 C
ATOM 2045 CG1 VAL B 7 21.092 15.002 42.494 1.00 79.23 C
ATOM 2049 CG2 VAL B 7 20.346 12.946 43.687 1.00 79.22 C
ATOM 2053 C VAL B 7 22.311 13.348 40.415 1.00 80.27 C
ATOM 2054 O VAL B 7 21.937 14.328 39.759 1.00 80.27 O
ATOM 2056 N TYR B 8 23.517 12.809 40.259 1.00 79.95 N
ATOM 2057 CA TYR B 8 24.474 13.363 39.313 1.00 79.95 C
ATOM 2059 CB TYR B 8 25.847 12.697 39.486 1.00 79.66 C
ATOM 2062 CG TYR B 8 26.909 13.218 38.529 1.00 79.66 C
ATOM 2063 CD1 TYR B 8 27.478 14.478 38.703 1.00 79.66 C
ATOM 2065 CE1 TYR B 8 28.444 14.958 37.831 1.00 79.66 C
ATOM 2067 CZ TYR B 8 28.865 14.173 36.779 1.00 79.66 C
ATOM 2068 OH TYR B 8 29.825 14.640 35.913 1.00 79.66 O
ATOM 2070 CE2 TYR B 8 28.325 12.919 36.585 1.00 79.66 C
ATOM 2072 CD2 TYR B 8 27.353 12.445 37.459 1.00 79.66 C
ATOM 2074 C TYR B 8 23.951 13.207 37.884 1.00 79.95 C
ATOM 2075 O TYR B 8 24.569 13.705 36.937 1.00 79.95 O
ATOM 2077 N ALA B 9 22.809 12.526 37.712 1.00 80.47 N
ATOM 2078 CA ALA B 9 22.221 12.378 36.382 1.00 80.47 C
ATOM 2080 CB ALA B 9 21.051 11.395 36.420 1.00 78.95 C
ATOM 2084 C ALA B 9 21.758 13.717 35.823 1.00 80.47 C
ATOM 2085 O ALA B 9 21.827 13.949 34.609 1.00 80.47 O
ATOM 2087 N ASN B 10 21.261 14.606 36.684 1.00 78.19 N
ATOM 2088 CA ASN B 10 20.912 15.948 36.235 1.00 78.18 C
ATOM 2090 CB ASN B 10 20.105 16.644 37.329 1.00 78.39 C
ATOM 2093 CG ASN B 10 18.743 16.000 37.542 1.00 78.39 C
ATOM 2094 OD1 ASN B 10 18.177 15.401 36.628 1.00 78.39 O
ATOM 2095 ND2 ASN B 10 18.229 16.094 38.762 1.00 78.39 N
ATOM 2098 C ASN B 10 22.147 16.764 35.859 1.00 78.19 C
ATOM 2099 O ASN B 10 22.037 17.714 35.076 1.00 78.18 O
TER 116 ASN B 10
ATOM 3968 N THR C 3 12.127 9.313 24.749 1.00 79.35 N
ATOM 3969 CA THR C 3 10.942 8.737 24.046 1.00 79.35 C
ATOM 3971 CB THR C 3 11.262 7.332 23.448 1.00 79.78 C
ATOM 3973 OG1 THR C 3 11.663 6.434 24.490 1.00 79.78 O
ATOM 3975 CG2 THR C 3 12.389 7.415 22.416 1.00 79.78 C
ATOM 3979 C THR C 3 9.763 8.654 25.028 1.00 79.35 C
ATOM 3980 O THR C 3 9.889 8.068 26.102 1.00 79.35 O
ATOM 3984 N GLY C 4 8.622 9.230 24.652 1.00 79.79 N
ATOM 3985 CA GLY C 4 7.500 9.425 25.558 1.00 79.79 C
ATOM 3988 C GLY C 4 7.491 10.798 26.210 1.00 79.79 C
ATOM 3989 O GLY C 4 8.275 11.700 25.895 1.00 79.79 O
ATOM 3991 N ALA C 5 6.558 10.952 27.145 1.00 80.31 N
ATOM 3992 CA ALA C 5 6.415 12.204 27.871 1.00 80.31 C
ATOM 3994 CB ALA C 5 5.000 12.293 28.444 1.00 76.75 C
ATOM 3998 C ALA C 5 7.437 12.387 28.995 1.00 80.31 C
ATOM 3999 O ALA C 5 7.578 13.512 29.487 1.00 80.31 O
ATOM 4001 N GLN C 6 8.160 11.330 29.402 1.00 79.57 N
ATOM 4002 CA GLN C 6 8.950 11.380 30.637 1.00 79.57 C
ATOM 4004 CB GLN C 6 9.511 9.998 31.005 1.00 81.75 C
ATOM 4007 CG GLN C 6 10.700 9.510 30.181 1.00 81.75 C
ATOM 4010 CD GLN C 6 10.314 9.110 28.788 1.00 81.75 C
ATOM 4011 OE1 GLN C 6 9.147 9.195 28.407 1.00 81.75 O
ATOM 4012 NE2 GLN C 6 11.293 8.668 28.008 1.00 81.75 N
ATOM 4015 C GLN C 6 10.108 12.363 30.557 1.00 79.57 C
ATOM 4016 O GLN C 6 10.641 12.760 31.599 1.00 79.57 O
ATOM 4018 N VAL C 7 10.531 12.731 29.349 1.00 79.61 N
ATOM 4019 CA VAL C 7 11.538 13.775 29.192 1.00 79.61 C
ATOM 4021 CB VAL C 7 11.695 14.094 27.694 1.00 78.73 C
ATOM 4023 CG1 VAL C 7 10.350 14.552 27.106 1.00 79.29 C
ATOM 4027 CG2 VAL C 7 12.788 15.133 27.480 1.00 80.51 C
ATOM 4031 C VAL C 7 11.178 15.010 30.006 1.00 79.62 C
ATOM 4032 O VAL C 7 12.062 15.759 30.443 1.00 79.61 O
ATOM 4034 N TYR C 8 9.882 15.234 30.243 1.00 78.80 N
ATOM 4035 CA TYR C 8 9.422 16.321 31.101 1.00 78.80 C
ATOM 4037 CB TYR C 8 7.887 16.311 31.116 1.00 79.05 C
ATOM 4040 CG TYR C 8 7.242 17.382 31.967 1.00 79.05 C
ATOM 4041 CD1 TYR C 8 7.143 18.691 31.510 1.00 79.05 C
ATOM 4043 CE1 TYR C 8 6.548 19.676 32.279 1.00 79.05 C
ATOM 4045 CZ TYR C 8 6.045 19.358 33.521 1.00 79.05 C
ATOM 4046 OH TYR C 8 5.457 20.342 34.283 1.00 79.05 O
ATOM 4048 CE2 TYR C 8 6.125 18.064 33.998 1.00 79.05 C
ATOM 4050 CD2 TYR C 8 6.720 17.084 33.219 1.00 79.05 C
ATOM 4052 C TYR C 8 9.983 16.231 32.521 1.00 78.80 C
ATOM 4053 O TYR C 8 9.801 17.170 33.302 1.00 78.80 O
ATOM 4055 N ALA C 9 10.675 15.139 32.866 1.00 79.52 N
ATOM 4056 CA ALA C 9 11.171 14.948 34.228 1.00 79.52 C
ATOM 4058 CB ALA C 9 12.014 13.674 34.293 1.00 78.34 C
ATOM 4062 C ALA C 9 11.983 16.145 34.702 1.00 79.52 C
ATOM 4063 O ALA C 9 11.793 16.641 35.818 1.00 79.52 O
ATOM 4065 N ASN C 10 12.896 16.627 33.865 1.00 80.25 N
ATOM 4066 CA ASN C 10 13.672 17.797 34.239 1.00 80.25 C
ATOM 4068 CB ASN C 10 14.712 18.063 33.172 1.00 78.17 C
ATOM 4071 CG ASN C 10 15.782 17.007 33.161 1.00 78.17 C
ATOM 4072 OD1 ASN C 10 16.004 16.325 34.166 1.00 78.17 O
ATOM 4073 ND2 ASN C 10 16.442 16.845 32.028 1.00 78.17 N
ATOM 4076 C ASN C 10 12.798 19.015 34.457 1.00 80.25 C
ATOM 4077 O ASN C 10 13.290 20.040 34.941 1.00 80.25 O
TER 174 ASN C 10
ATOM 5959 N THR D 3 60.805 23.774 6.731 1.00 77.43 N
ATOM 5960 CA THR D 3 61.763 22.725 7.191 1.00 77.43 C
ATOM 5962 CB THR D 3 62.603 22.175 6.010 1.00 78.92 C
ATOM 5964 OG1 THR D 3 63.305 23.243 5.360 1.00 78.92 O
ATOM 5966 CG2 THR D 3 61.703 21.469 5.000 1.00 78.92 C
ATOM 5970 C THR D 3 62.675 23.293 8.284 1.00 77.43 C
ATOM 5971 O THR D 3 62.761 24.506 8.443 1.00 77.43 O
ATOM 5975 N GLY D 4 63.363 22.412 9.022 1.00 79.20 N
ATOM 5976 CA GLY D 4 64.130 22.797 10.196 1.00 79.20 C
ATOM 5979 C GLY D 4 63.309 22.788 11.472 1.00 79.20 C
ATOM 5980 O GLY D 4 62.145 22.393 11.509 1.00 79.20 O
ATOM 5982 N ALA D 5 63.950 23.233 12.557 1.00 80.19 N
ATOM 5983 CA ALA D 5 63.257 23.361 13.836 1.00 80.19 C
ATOM 5985 CB ALA D 5 64.211 23.993 14.857 1.00 75.84 C
ATOM 5989 C ALA D 5 61.970 24.181 13.714 1.00 80.19 C
ATOM 5990 O ALA D 5 60.999 23.931 14.438 1.00 80.19 O
ATOM 5992 N GLN D 6 61.942 25.142 12.784 1.00 78.97 N
ATOM 5993 CA GLN D 6 60.843 26.092 12.591 1.00 78.97 C
ATOM 5995 CB GLN D 6 61.204 27.062 11.469 1.00 80.12 C
ATOM 5998 CG GLN D 6 61.464 26.355 10.144 1.00 80.12 C
ATOM 6001 CD GLN D 6 61.853 27.306 9.032 1.00 80.12 C
ATOM 6002 OE1 GLN D 6 62.179 28.464 9.288 1.00 80.12 O
ATOM 6003 NE2 GLN D 6 61.851 26.812 7.790 1.00 80.12 N
ATOM 6006 C GLN D 6 59.510 25.447 12.245 1.00 78.96 C
ATOM 6007 O GLN D 6 58.509 26.166 12.139 1.00 78.96 O
ATOM 6009 N VAL D 7 59.474 24.140 11.995 1.00 78.86 N
ATOM 6010 CA VAL D 7 58.194 23.449 11.865 1.00 78.86 C
ATOM 6012 CB VAL D 7 58.425 21.993 11.421 1.00 81.21 C
ATOM 6014 CG1 VAL D 7 58.877 21.975 9.986 1.00 81.21 C
ATOM 6018 CG2 VAL D 7 59.474 21.288 12.321 1.00 81.21 C
ATOM 6022 C VAL D 7 57.423 23.523 13.168 1.00 78.86 C
ATOM 6023 O VAL D 7 56.190 23.411 13.186 1.00 78.86 O
ATOM 6025 N TYR D 8 58.138 23.697 14.277 1.00 79.34 N
ATOM 6026 CA TYR D 8 57.515 23.918 15.568 1.00 79.34 C
ATOM 6028 CB TYR D 8 58.584 23.823 16.649 1.00 79.01 C
ATOM 6031 CG TYR D 8 58.096 24.160 18.024 1.00 79.01 C
ATOM 6032 CD1 TYR D 8 57.220 23.317 18.688 1.00 79.01 C
ATOM 6034 CE1 TYR D 8 56.778 23.608 19.947 1.00 79.01 C
ATOM 6036 CZ TYR D 8 57.227 24.739 20.578 1.00 79.01 C
ATOM 6037 OH TYR D 8 56.779 25.015 21.845 1.00 79.01 O
ATOM 6039 CE2 TYR D 8 58.111 25.590 19.948 1.00 79.01 C
ATOM 6041 CD2 TYR D 8 58.544 25.294 18.680 1.00 79.01 C
ATOM 6043 C TYR D 8 56.807 25.258 15.636 1.00 79.34 C
ATOM 6044 O TYR D 8 55.950 25.447 16.505 1.00 79.34 O
ATOM 6046 N ALA D 9 57.137 26.174 14.730 1.00 78.81 N
ATOM 6047 CA ALA D 9 56.591 27.522 14.741 1.00 78.81 C
ATOM 6049 CB ALA D 9 56.758 28.183 13.374 1.00 79.37 C
ATOM 6053 C ALA D 9 55.127 27.498 15.121 1.00 78.81 C
ATOM 6054 O ALA D 9 54.764 27.896 16.226 1.00 78.81 O
ATOM 6056 N ASN D 10 54.284 26.983 14.233 1.00 80.12 N
ATOM 6057 CA ASN D 10 52.848 27.017 14.467 1.00 80.13 C
ATOM 6059 CB ASN D 10 52.140 26.402 13.274 1.00 80.26 C
ATOM 6062 CG ASN D 10 52.645 25.031 12.969 1.00 80.26 C
ATOM 6063 OD1 ASN D 10 53.101 24.311 13.860 1.00 80.26 O
ATOM 6064 ND2 ASN D 10 52.586 24.655 11.705 1.00 80.26 N
ATOM 6067 C ASN D 10 52.420 26.314 15.753 1.00 80.14 C
ATOM 6068 O ASN D 10 51.225 26.333 16.068 1.00 80.16 O
TER 232 ASN D 10
END
"""
pdb_str_ref_minimized = """\
CRYST1 69.211 49.956 52.557 90.00 90.00 90.00 P 1
SCALE1 0.014449 0.000000 0.000000 0.00000
SCALE2 0.000000 0.020018 0.000000 0.00000
SCALE3 0.000000 0.000000 0.019027 0.00000
ATOM 1 N THR A 3 50.767 43.905 24.734 1.00 80.52 N
ATOM 2 CA THR A 3 50.582 43.115 23.523 1.00 80.52 C
ATOM 4 CB THR A 3 49.583 41.964 23.746 1.00 79.62 C
ATOM 6 OG1 THR A 3 49.442 41.209 22.536 1.00 79.62 O
ATOM 8 CG2 THR A 3 48.225 42.510 24.160 1.00 79.62 C
ATOM 12 C THR A 3 50.093 43.985 22.370 1.00 80.52 C
ATOM 13 O THR A 3 49.756 45.154 22.562 1.00 80.52 O
ATOM 17 N GLY A 4 50.055 43.408 21.174 1.00 79.31 N
ATOM 18 CA GLY A 4 49.609 44.126 19.994 1.00 79.31 C
ATOM 21 C GLY A 4 49.582 43.269 18.744 1.00 79.31 C
ATOM 22 O GLY A 4 48.530 43.075 18.136 1.00 79.31 O
ATOM 24 N ALA A 5 50.746 42.754 18.361 1.00 78.81 N
ATOM 25 CA ALA A 5 50.858 41.913 17.175 1.00 78.81 C
ATOM 27 CB ALA A 5 51.904 42.473 16.224 1.00 77.40 C
ATOM 31 C ALA A 5 51.199 40.476 17.556 1.00 78.81 C
ATOM 32 O ALA A 5 51.983 39.814 16.877 1.00 78.81 O
ATOM 34 N GLN A 6 50.604 40.001 18.645 1.00 80.55 N
ATOM 35 CA GLN A 6 50.843 38.643 19.118 1.00 80.55 C
ATOM 37 CB GLN A 6 51.379 38.655 20.554 1.00 79.84 C
ATOM 40 CG GLN A 6 52.763 39.273 20.711 1.00 79.84 C
ATOM 43 CD GLN A 6 52.740 40.791 20.698 1.00 79.84 C
ATOM 44 OE1 GLN A 6 51.676 41.408 20.641 1.00 79.84 O
ATOM 45 NE2 GLN A 6 53.919 41.400 20.750 1.00 79.84 N
ATOM 48 C GLN A 6 49.570 37.807 19.041 1.00 80.55 C
ATOM 49 O GLN A 6 49.417 36.823 19.765 1.00 80.55 O
ATOM 51 N VAL A 7 48.660 38.205 18.158 1.00 79.06 N
ATOM 52 CA VAL A 7 47.399 37.495 17.985 1.00 79.06 C
ATOM 54 CB VAL A 7 46.201 38.453 18.098 1.00 78.99 C
ATOM 56 CG1 VAL A 7 44.896 37.694 17.913 1.00 78.99 C
ATOM 60 CG2 VAL A 7 46.222 39.173 19.437 1.00 78.99 C
ATOM 64 C VAL A 7 47.382 36.766 16.647 1.00 79.06 C
ATOM 65 O VAL A 7 47.123 35.564 16.586 1.00 79.06 O
ATOM 67 N TYR A 8 47.661 37.501 15.575 1.00 79.78 N
ATOM 68 CA TYR A 8 47.677 36.928 14.235 1.00 79.78 C
ATOM 70 CB TYR A 8 47.601 38.032 13.178 1.00 77.69 C
ATOM 73 CG TYR A 8 47.544 37.522 11.755 1.00 77.69 C
ATOM 74 CD1 TYR A 8 46.344 37.106 11.194 1.00 77.69 C
ATOM 76 CE1 TYR A 8 46.286 36.641 9.894 1.00 77.69 C
ATOM 78 CZ TYR A 8 47.437 36.589 9.136 1.00 77.69 C
ATOM 79 OH TYR A 8 47.384 36.126 7.842 1.00 77.69 O
ATOM 81 CE2 TYR A 8 48.641 36.998 9.670 1.00 77.69 C
ATOM 83 CD2 TYR A 8 48.689 37.462 10.971 1.00 77.69 C
ATOM 85 C TYR A 8 48.925 36.077 14.022 1.00 79.78 C
ATOM 86 O TYR A 8 48.903 35.104 13.267 1.00 79.78 O
ATOM 88 N ALA A 9 50.010 36.450 14.691 1.00 79.03 N
ATOM 89 CA ALA A 9 51.269 35.722 14.577 1.00 79.02 C
ATOM 91 CB ALA A 9 52.423 36.578 15.075 1.00 79.18 C
ATOM 95 C ALA A 9 51.210 34.407 15.346 1.00 79.02 C
ATOM 96 O ALA A 9 51.871 33.434 14.982 1.00 79.02 O
ATOM 98 N ASN A 10 50.416 34.384 16.411 1.00 79.63 N
ATOM 99 CA ASN A 10 50.270 33.188 17.233 1.00 79.63 C
ATOM 101 CB ASN A 10 50.147 33.564 18.711 1.00 78.91 C
ATOM 104 CG ASN A 10 51.365 34.304 19.227 1.00 78.91 C
ATOM 105 OD1 ASN A 10 52.472 34.134 18.716 1.00 78.91 O
ATOM 106 ND2 ASN A 10 51.167 35.132 20.246 1.00 78.91 N
ATOM 109 C ASN A 10 49.059 32.369 16.797 1.00 79.63 C
ATOM 110 O ASN A 10 49.101 31.139 16.787 1.00 79.63 O
TER
"""
ref_file = open("ref.pdb", 'w')
ref_file.write(pdb_str_ref_minimized)
ref_file.close()
log = cStringIO.StringIO()
# log = sys.stdout
# orig_file = open("start.pdb", "w")
# orig_file.write(pdb_str_original)
# orig_file.close()
def_pars = reference_model_params
params_text = """\
reference_model {
reference_group {
reference = chain 'A'
selection = chain 'A'
file_name = "ref.pdb"
}
reference_group {
reference = chain 'A'
selection = chain 'B'
file_name = "ref.pdb"
}
reference_group {
reference = chain 'A'
selection = chain 'C'
file_name = "ref.pdb"
}
reference_group {
reference = chain 'A'
selection = chain 'D'
file_name = "ref.pdb"
}
} """
pars = iotbx.phil.parse(params_text)
all_pars = def_pars.fetch(pars).extract()
all_pars.reference_model.enabled = True
model = mmtbx.model.manager(
model_input = iotbx.pdb.input(lines=flex.split_lines(pdb_str_original),
source_info=None),
process_input=True)
pdb_h = model.get_hierarchy()
rm = reference_model(
model = model,
reference_file_list=['ref.pdb'],
params=all_pars.reference_model,
log=log)
# rm.show_reference_summary(log=log)
assert rm.get_n_proxies() == 124, \
"Expecting 124 proxies, got %d" % rm.get_n_proxies()
# STOP()
new_h = pdb_h.deep_copy()
xray_structure = new_h.extract_xray_structure()
rm.set_rotamer_to_reference(
xray_structure=xray_structure)
new_h.adopt_xray_structure(xray_structure)
r1 = rotalyze(pdb_hierarchy=new_h, outliers_only=False)
assert r1.n_outliers == 0
# new_h.write_pdb_file(file_name="final.pdb")
#
# The same, but from multiple files
for i in range(4):
ref_file = open("ref_%d.pdb" % i, 'w')
ref_file.write(pdb_str_ref_minimized)
ref_file.close()
def_pars = reference_model_params
params_text = """\
reference_model {
file = ref_0.pdb
file = ref_1.pdb
file = ref_2.pdb
file = ref_3.pdb
reference_group {
reference = chain 'A'
selection = chain 'A'
file_name = "ref_0.pdb"
}
reference_group {
reference = chain 'A'
selection = chain 'B'
file_name = "ref_1.pdb"
}
reference_group {
reference = chain 'A'
selection = chain 'C'
file_name = "ref_2.pdb"
}
reference_group {
reference = chain 'A'
selection = chain 'D'
file_name = "ref_3.pdb"
}
} """
pars = iotbx.phil.parse(params_text)
all_pars = def_pars.fetch(pars).extract()
all_pars.reference_model.enabled = True
rm = reference_model(
model=model,
reference_file_list=['ref_0.pdb', 'ref_1.pdb', 'ref_2.pdb', 'ref_3.pdb'],
params=all_pars.reference_model,
log=log)
assert rm.get_n_proxies() == 124, \
"Expecting 124 proxies, got %d" % rm.get_n_proxies()
for i in range(4):
os.remove("ref_%d.pdb" % i)
#
# The same, 1 group, should be 116/4=29 proxies
ref_file = open("ref_0.pdb", 'w')
ref_file.write(pdb_str_ref_minimized)
ref_file.close()
def_pars = reference_model_params
params_text = """\
reference_model {
file = ref_0.pdb
reference_group {
reference = chain 'A'
selection = chain 'A'
file_name = "ref_0.pdb"
}
} """
pars = iotbx.phil.parse(params_text)
all_pars = def_pars.fetch(pars).extract()
all_pars.reference_model.enabled = True
rm = reference_model(
model=model,
reference_file_list=['ref_0.pdb'],
params=all_pars.reference_model,
log=log)
assert rm.get_n_proxies() == 31, \
"Expecting 31 proxies, got %d" % rm.get_n_proxies()
all_pars.reference_model.side_chain=False
rm = reference_model(
model=model,
reference_file_list=['ref_0.pdb'],
params=all_pars.reference_model,
log=log)
assert rm.get_n_proxies() == 21, \
"Expecting 21 proxies, got %d" % rm.get_n_proxies()
all_pars.reference_model.side_chain=True
all_pars.reference_model.main_chain=False
rm = reference_model(
model=model,
reference_file_list=['ref_0.pdb'],
params=all_pars.reference_model,
log=log)
assert rm.get_n_proxies() == 10, \
"Expecting 10 proxies, got %d" % rm.get_n_proxies()
# just throw all in without specifying:
all_pars = def_pars.fetch().extract()
all_pars.reference_model.enabled = True
all_pars.reference_model.file = 'ref_0.pdb'
rm = reference_model(
model=model,
reference_file_list=['ref_0.pdb'],
params=all_pars.reference_model,
log=log)
assert rm.get_n_proxies() == 124, \
"Expecting 124 proxies, got %d" % rm.get_n_proxies()
os.remove("ref_0.pdb")
# reference on self and make sure it is chains A<->A, B<->B etc
log = cStringIO.StringIO()
def_pars = reference_model_params
all_pars = def_pars.fetch().extract()
all_pars.reference_model.enabled = True
all_pars.reference_model.use_starting_model_as_reference = True
rm = reference_model(
model=model,
reference_hierarchy_list=\
[model.get_hierarchy()],
params=all_pars.reference_model,
log=log)
rm.show_reference_summary(log=log)
log_strings = log.getvalue().split("\n")
# print rm.get_n_proxies()
# print "=========="
# print "\n".join(log_strings)
# print "=========="
assert rm.get_n_proxies() == 124, \
"Expecting 124 proxies, got %d" % rm.get_n_proxies()
for needed_string in [
"GLN A 6 <=====> GLN A 6",
"ALA A 9 <=====> ALA A 9",
"ASN A 10 <=====> ASN A 10",
"THR B 3 <=====> THR B 3",
"GLN B 6 <=====> GLN B 6",
"ALA B 9 <=====> ALA B 9",
"ASN B 10 <=====> ASN B 10",
"THR C 3 <=====> THR C 3",
"GLN C 6 <=====> GLN C 6",
"ALA D 5 <=====> ALA D 5",
"GLN D 6 <=====> GLN D 6",
]:
assert needed_string in log_strings, "'%s' not in log!" % needed_string
def exercise_multiple_ncs_groups_found(mon_lib_srv, ener_lib):
  """Exercise reference_model when the model contains several NCS-related
  chains (C and A) and a single reference file covers both.

  Writes the reference structure to a temporary 'ref_0.pdb'-style file,
  builds the restraints and checks that exactly 36 dihedral proxies are
  generated.  The temporary file is removed afterwards.

  Note: mon_lib_srv / ener_lib are accepted for signature consistency with
  the other exercise_* functions in this file.
  """
  pdb_str_original = """\
CRYST1   49.945   53.842   33.425  90.00  90.00  90.00 P 1
ATOM   5466  N   ASN C 236       9.580  47.176  25.356  1.00 60.13           N
ATOM   5467  CA  ASN C 236       8.200  46.668  25.442  1.00 60.52           C
ATOM   5468  C   ASN C 236       8.058  45.158  25.655  1.00 60.49           C
ATOM   5469  O   ASN C 236       7.110  44.527  25.191  1.00 60.60           O
ATOM   5470  CB  ASN C 236       7.370  47.129  24.244  1.00 60.55           C
ATOM   5471  CG  ASN C 236       6.038  47.680  24.666  1.00 61.75           C
ATOM   5472  OD1 ASN C 236       5.937  48.842  25.096  1.00 62.86           O
ATOM   5473  ND2 ASN C 236       5.000  46.849  24.575  1.00 61.22           N
ATOM   5474  N   LEU C 237       8.997  44.597  26.396  1.00 60.55           N
ATOM   5475  CA  LEU C 237       9.125  43.164  26.494  1.00 60.48           C
ATOM   5476  C   LEU C 237       9.496  42.788  27.909  1.00 60.06           C
ATOM   5477  O   LEU C 237       9.016  41.780  28.425  1.00 60.41           O
ATOM   5478  CB  LEU C 237      10.199  42.697  25.506  1.00 60.91           C
ATOM   5479  CG  LEU C 237      10.223  41.328  24.819  1.00 62.25           C
ATOM   5480  CD1 LEU C 237       9.088  40.386  25.251  1.00 63.18           C
ATOM   5481  CD2 LEU C 237      10.244  41.542  23.293  1.00 62.89           C
TER
ATOM      1  N   THR A   3      42.874  19.189  20.321  1.00 80.52           N
ATOM      2  CA  THR A   3      42.493  17.965  19.539  1.00 80.52           C
ATOM      4  CB  THR A   3      42.127  16.792  20.509  1.00 79.62           C
ATOM      6  OG1 THR A   3      41.887  15.591  19.761  1.00 79.62           O
ATOM      8  CG2 THR A   3      40.920  17.121  21.394  1.00 79.62           C
ATOM     12  C   THR A   3      41.338  18.247  18.549  1.00 80.52           C
ATOM     13  O   THR A   3      40.201  18.456  18.959  1.00 80.52           O
ATOM     17  N   GLY A   4      41.644  18.246  17.245  1.00 79.31           N
ATOM     18  CA  GLY A   4      40.711  18.691  16.220  1.00 79.31           C
ATOM     21  C   GLY A   4      40.442  17.713  15.089  1.00 79.31           C
ATOM     22  O   GLY A   4      39.471  16.958  15.136  1.00 79.31           O
ATOM     24  N   ALA A   5      41.262  17.732  14.043  1.00 78.81           N
ATOM     25  CA  ALA A   5      41.076  16.837  12.909  1.00 78.81           C
ATOM     27  CB  ALA A   5      41.455  17.547  11.611  1.00 77.40           C
ATOM     31  C   ALA A   5      41.876  15.548  13.042  1.00 78.81           C
ATOM     32  O   ALA A   5      41.939  14.770  12.085  1.00 78.81           O
ATOM     34  N   GLN A   6      42.497  15.306  14.200  1.00 80.55           N
ATOM     35  CA  GLN A   6      43.323  14.113  14.346  1.00 80.55           C
ATOM     37  CB  GLN A   6      44.064  14.112  15.682  1.00 79.84           C
ATOM     40  CG  GLN A   6      44.945  15.305  15.924  1.00 79.84           C
ATOM     43  CD  GLN A   6      44.171  16.444  16.553  1.00 79.84           C
ATOM     44  OE1 GLN A   6      42.971  16.324  16.817  1.00 79.84           O
ATOM     45  NE2 GLN A   6      44.848  17.558  16.796  1.00 79.84           N
ATOM     48  C   GLN A   6      42.469  12.864  14.273  1.00 80.55           C
ATOM     49  O   GLN A   6      42.829  11.892  13.601  1.00 80.55           O
ATOM     51  N   VAL A   7      41.324  12.884  14.959  1.00 79.06           N
ATOM     52  CA  VAL A   7      40.503  11.693  15.123  1.00 79.06           C
ATOM     54  CB  VAL A   7      39.291  12.027  16.016  1.00 78.99           C
ATOM     56  CG1 VAL A   7      38.330  10.839  16.122  1.00 78.99           C
ATOM     60  CG2 VAL A   7      39.756  12.478  17.391  1.00 78.99           C
ATOM     64  C   VAL A   7      40.080  11.140  13.778  1.00 79.06           C
ATOM     65  O   VAL A   7      39.643   9.988  13.688  1.00 79.06           O
ATOM     67  N   TYR A   8      40.219  11.933  12.718  1.00 79.78           N
ATOM     68  CA  TYR A   8      40.126  11.380  11.379  1.00 79.78           C
ATOM     70  CB  TYR A   8      40.413  12.461  10.342  1.00 77.69           C
ATOM     73  CG  TYR A   8      40.315  11.969   8.911  1.00 77.69           C
ATOM     74  CD1 TYR A   8      39.085  11.871   8.272  1.00 77.69           C
ATOM     76  CE1 TYR A   8      38.989  11.420   6.968  1.00 77.69           C
ATOM     78  CZ  TYR A   8      40.125  11.035   6.294  1.00 77.69           C
ATOM     79  OH  TYR A   8      40.036  10.581   5.000  1.00 77.69           O
ATOM     81  CE2 TYR A   8      41.353  11.105   6.908  1.00 77.69           C
ATOM     83  CD2 TYR A   8      41.444  11.566   8.211  1.00 77.69           C
ATOM     85  C   TYR A   8      41.097  10.224  11.185  1.00 79.78           C
ATOM     86  O   TYR A   8      40.883   9.397  10.295  1.00 79.78           O
ATOM     88  N   ALA A   9      42.156  10.145  11.999  1.00 79.03           N
ATOM     89  CA  ALA A   9      43.144   9.077  11.874  1.00 79.02           C
ATOM     91  CB  ALA A   9      44.125   9.129  13.047  1.00 79.18           C
ATOM     95  C   ALA A   9      42.514   7.692  11.812  1.00 79.02           C
ATOM     96  O   ALA A   9      43.151   6.757  11.310  1.00 79.02           O
ATOM     98  N   ASN A  10      41.292   7.531  12.330  1.00 79.63           N
ATOM     99  CA  ASN A  10      40.571   6.269  12.224  1.00 79.63           C
ATOM    101  CB  ASN A  10      39.519   6.171  13.330  1.00 78.91           C
ATOM    104  CG  ASN A  10      40.136   6.118  14.715  1.00 78.91           C
ATOM    105  OD1 ASN A  10      41.317   5.836  14.855  1.00 78.91           O
ATOM    106  ND2 ASN A  10      39.329   6.346  15.746  1.00 78.91           N
ATOM    109  C   ASN A  10      39.894   6.092  10.871  1.00 79.63           C
ATOM    110  O   ASN A  10      39.405   5.000  10.574  1.00 79.63           O
TER
END
"""
  pdb_str_ref = """\
CRYST1   49.945   53.842   33.425  90.00  90.00  90.00 P 1
ATOM   5466  N   ASN C 236      10.328  45.698  25.449  1.00 60.13           N
ATOM   5467  CA  ASN C 236       8.971  45.973  25.787  1.00 60.52           C
ATOM   5468  C   ASN C 236       8.271  44.664  25.724  1.00 60.49           C
ATOM   5469  O   ASN C 236       7.276  44.532  25.017  1.00 60.60           O
ATOM   5470  CB  ASN C 236       8.337  46.962  24.776  1.00 60.55           C
ATOM   5471  CG  ASN C 236       7.235  47.762  25.415  1.00 61.75           C
ATOM   5472  OD1 ASN C 236       6.331  47.222  26.063  1.00 62.86           O
ATOM   5473  ND2 ASN C 236       7.315  49.079  25.302  1.00 61.22           N
ATOM   5474  N   LEU C 237       8.820  43.663  26.441  1.00 60.55           N
ATOM   5475  CA  LEU C 237       8.420  42.305  26.286  1.00 60.48           C
ATOM   5476  C   LEU C 237       8.713  41.508  27.558  1.00 60.06           C
ATOM   5477  O   LEU C 237       7.907  41.421  28.503  1.00 60.41           O
ATOM   5478  CB  LEU C 237       9.159  41.598  25.114  1.00 60.91           C
ATOM   5479  CG  LEU C 237       9.365  42.136  23.662  1.00 62.25           C
ATOM   5480  CD1 LEU C 237      10.605  42.996  23.496  1.00 63.18           C
ATOM   5481  CD2 LEU C 237       9.419  40.966  22.765  1.00 62.89           C
TER
ATOM      1  N   THR A   3      40.527  19.363  20.612  1.00 80.52           N
ATOM      2  CA  THR A   3      41.278  18.625  19.636  1.00 80.52           C
ATOM      4  CB  THR A   3      40.971  17.090  19.710  1.00 79.62           C
ATOM      6  OG1 THR A   3      40.039  16.849  20.760  1.00 79.62           O
ATOM      8  CG2 THR A   3      42.308  16.246  19.999  1.00 79.62           C
ATOM     12  C   THR A   3      40.899  19.134  18.229  1.00 80.52           C
ATOM     13  O   THR A   3      39.780  19.542  17.983  1.00 80.52           O
ATOM     17  N   GLY A   4      41.890  19.246  17.384  1.00 79.31           N
ATOM     18  CA  GLY A   4      41.732  19.850  16.092  1.00 79.31           C
ATOM     21  C   GLY A   4      41.306  18.930  14.985  1.00 79.31           C
ATOM     22  O   GLY A   4      40.121  18.885  14.657  1.00 79.31           O
ATOM     24  N   ALA A   5      42.279  18.233  14.402  1.00 78.81           N
ATOM     25  CA  ALA A   5      41.969  17.264  13.392  1.00 78.81           C
ATOM     27  CB  ALA A   5      42.474  17.741  12.001  1.00 77.40           C
ATOM     31  C   ALA A   5      42.643  15.914  13.751  1.00 78.81           C
ATOM     32  O   ALA A   5      43.503  15.474  12.983  1.00 78.81           O
ATOM     34  N   GLN A   6      42.216  15.310  14.835  1.00 80.55           N
ATOM     35  CA  GLN A   6      42.871  14.115  15.363  1.00 80.55           C
ATOM     37  CB  GLN A   6      43.590  14.383  16.698  1.00 79.84           C
ATOM     40  CG  GLN A   6      44.888  15.121  16.536  1.00 79.84           C
ATOM     43  CD  GLN A   6      44.671  16.613  16.295  1.00 79.84           C
ATOM     44  OE1 GLN A   6      44.164  17.330  17.155  1.00 79.84           O
ATOM     45  NE2 GLN A   6      45.100  17.105  15.149  1.00 79.84           N
ATOM     48  C   GLN A   6      41.888  12.972  15.564  1.00 80.55           C
ATOM     49  O   GLN A   6      42.024  12.228  16.514  1.00 80.55           O
ATOM     51  N   VAL A   7      40.933  12.858  14.656  1.00 79.06           N
ATOM     52  CA  VAL A   7      40.101  11.677  14.619  1.00 79.06           C
ATOM     54  CB  VAL A   7      38.947  11.709  15.573  1.00 78.99           C
ATOM     56  CG1 VAL A   7      39.330  11.128  16.941  1.00 78.99           C
ATOM     60  CG2 VAL A   7      38.334  13.128  15.699  1.00 78.99           C
ATOM     64  C   VAL A   7      39.594  11.421  13.214  1.00 79.06           C
ATOM     65  O   VAL A   7      38.407  11.279  12.954  1.00 79.06           O
ATOM     67  N   TYR A   8      40.568  11.433  12.304  1.00 79.78           N
ATOM     68  CA  TYR A   8      40.360  10.983  10.905  1.00 79.78           C
ATOM     70  CB  TYR A   8      40.783  12.069   9.904  1.00 77.69           C
ATOM     73  CG  TYR A   8      40.349  11.670   8.527  1.00 77.69           C
ATOM     74  CD1 TYR A   8      39.008  11.604   8.192  1.00 77.69           C
ATOM     76  CE1 TYR A   8      38.600  11.184   6.931  1.00 77.69           C
ATOM     78  CZ  TYR A   8      39.528  10.864   5.979  1.00 77.69           C
ATOM     79  OH  TYR A   8      39.195  10.466   4.696  1.00 77.69           O
ATOM     81  CE2 TYR A   8      40.880  10.918   6.304  1.00 77.69           C
ATOM     83  CD2 TYR A   8      41.286  11.303   7.563  1.00 77.69           C
ATOM     85  C   TYR A   8      41.107   9.702  10.624  1.00 79.78           C
ATOM     86  O   TYR A   8      40.892   9.064   9.584  1.00 79.78           O
ATOM     88  N   ALA A   9      42.003   9.312  11.526  1.00 79.03           N
ATOM     89  CA  ALA A   9      42.888   8.166  11.317  1.00 79.02           C
ATOM     91  CB  ALA A   9      44.052   8.246  12.231  1.00 79.18           C
ATOM     95  C   ALA A   9      42.102   6.856  11.504  1.00 79.02           C
ATOM     96  O   ALA A   9      42.154   5.981  10.647  1.00 79.02           O
ATOM     98  N   ASN A  10      41.404   6.751  12.642  1.00 79.63           N
ATOM     99  CA  ASN A  10      40.465   5.684  12.913  1.00 79.63           C
ATOM    101  CB  ASN A  10      39.947   5.766  14.373  1.00 78.91           C
ATOM    104  CG  ASN A  10      41.037   5.501  15.391  1.00 78.91           C
ATOM    105  OD1 ASN A  10      42.073   4.895  15.058  1.00 78.91           O
ATOM    106  ND2 ASN A  10      40.820   5.957  16.635  1.00 78.91           N
ATOM    109  C   ASN A  10      39.283   5.748  11.958  1.00 79.63           C
ATOM    110  O   ASN A  10      39.365   5.382  10.797  1.00 79.63           O
TER
"""
  # Write the reference structure to disk; 'with' guarantees the handle
  # is closed even if the write raises.
  with open("ref.pdb", 'w') as ref_file:
    ref_file.write(pdb_str_ref)
  log = cStringIO.StringIO()
  # log = sys.stdout
  def_pars = reference_model_params
  all_pars = def_pars.fetch().extract()
  all_pars.reference_model.file = 'ref.pdb'
  all_pars.reference_model.enabled = True
  model = mmtbx.model.manager(
      model_input = iotbx.pdb.input(lines=flex.split_lines(pdb_str_original),
        source_info=None),
      process_input=True)
  rm = reference_model(
      model=model,
      reference_file_list=['ref.pdb'],
      params=all_pars.reference_model,
      log=log)
  assert rm.get_n_proxies() == 36, \
      "Expecting 36 proxies, got %d" % rm.get_n_proxies()
  os.remove("ref.pdb")
def exercise_cutted_residue(mon_lib_srv, ener_lib):
  """Exercise reference_model when model side chains are truncated.

  The working model (chain K) is missing several side-chain atoms that are
  present in the reference (chain G).  After fitting rotamers to the
  reference, rotalyze should report zero outliers.  The temporary
  reference file is removed at the end.

  Note: mon_lib_srv / ener_lib are accepted for signature consistency with
  the other exercise_* functions in this file.
  """
  pdb_str_original = """\
CRYST1  117.739  195.224  119.094  90.00 101.60  90.00 P 1 21 1
ATOM   6368  N   THR K 332       4.163  72.088  52.141  1.00171.28           N
ATOM   6369  CA  THR K 332       2.830  71.741  52.608  1.00153.71           C
ATOM   6370  C   THR K 332       1.990  70.958  51.609  1.00132.45           C
ATOM   6371  O   THR K 332       2.224  71.000  50.405  1.00130.38           O
ATOM   6372  CB  THR K 332       2.047  72.996  53.035  1.00155.45           C
ATOM   6373  N   VAL K 333       1.006  70.246  52.144  1.00121.58           N
ATOM   6374  CA  VAL K 333       0.085  69.440  51.360  1.00129.11           C
ATOM   6375  C   VAL K 333      -1.326  69.771  51.818  1.00146.57           C
ATOM   6376  O   VAL K 333      -1.517  70.242  52.935  1.00151.92           O
ATOM   6377  CB  VAL K 333       0.342  67.942  51.562  1.00126.37           C
ATOM   6378  N   SER K 334      -2.318  69.535  50.968  1.00156.08           N
ATOM   6379  CA  SER K 334      -3.687  69.866  51.335  1.00158.16           C
ATOM   6380  C   SER K 334      -4.197  69.116  52.555  1.00157.55           C
ATOM   6381  O   SER K 334      -4.066  67.905  52.664  1.00161.93           O
ATOM   6382  CB  SER K 334      -4.630  69.614  50.166  1.00162.09           C
ATOM   6383  OG  SER K 334      -5.836  69.041  50.632  1.00170.98           O
END
"""
  pdb_str_ref = """\
CRYST1  117.739  195.224  119.094  90.00 101.60  90.00 P 1 21 1
ATOM      1  N   THR G 332       4.195  72.012  51.895  1.00171.28           N
ATOM      2  CA  THR G 332       2.946  71.699  52.580  1.00153.71           C
ATOM      3  C   THR G 332       1.980  70.971  51.651  1.00132.45           C
ATOM      4  O   THR G 332       2.092  71.062  50.429  1.00130.38           O
ATOM      5  CB  THR G 332       2.291  72.982  53.125  1.00 20.00           C
ATOM      6  OG1 THR G 332       2.036  73.887  52.046  1.00 20.00           O
ATOM      7  CG2 THR G 332       3.269  73.749  54.003  1.00 20.00           C
ATOM      8  N   VAL G 333       1.033  70.248  52.240  1.00121.58           N
ATOM      9  CA  VAL G 333       0.047  69.503  51.468  1.00129.11           C
ATOM     10  C   VAL G 333      -1.363  69.905  51.883  1.00146.57           C
ATOM     11  O   VAL G 333      -1.552  70.599  52.882  1.00151.92           O
ATOM     12  CB  VAL G 333       0.216  67.983  51.643  1.00 20.00           C
ATOM     13  CG1 VAL G 333      -0.905  67.237  50.935  1.00 20.00           C
ATOM     14  CG2 VAL G 333       1.574  67.534  51.125  1.00 20.00           C
ATOM     15  N   SER G 334      -2.351  69.465  51.111  1.00156.08           N
ATOM     16  CA  SER G 334      -3.745  69.778  51.397  1.00158.16           C
ATOM     17  C   SER G 334      -4.297  68.870  52.492  1.00157.55           C
ATOM     18  O   SER G 334      -3.964  67.686  52.556  1.00161.93           O
ATOM     19  CB  SER G 334      -4.595  69.652  50.131  1.00162.09           C
ATOM     20  OG  SER G 334      -5.954  69.950  50.396  1.00170.98           O
"""
  # Map reference chain G onto working chain K explicitly.
  params_text = """\
reference_model {
  reference_group {
    reference = chain 'G'
    selection = chain 'K'
    file_name = "ref.pdb"
  }
}
"""
  # Write the reference structure to disk; 'with' guarantees the handle
  # is closed even if the write raises.
  with open("ref.pdb", 'w') as ref_file:
    ref_file.write(pdb_str_ref)
  log = cStringIO.StringIO()
  # log = sys.stdout
  def_pars = reference_model_params
  pars = iotbx.phil.parse(params_text)
  all_pars = def_pars.fetch(pars).extract()
  all_pars.reference_model.enabled = True
  model = mmtbx.model.manager(
      model_input = iotbx.pdb.input(lines=flex.split_lines(pdb_str_original),
        source_info=None),
      process_input=True)
  pdb_h = model.get_hierarchy()
  rm = reference_model(
      model=model,
      reference_file_list=['ref.pdb'],
      params=all_pars.reference_model,
      log=log)
  rm.show_reference_summary(log=log)
  # Fit the (truncated) side chains to the reference rotamers and verify
  # that no rotamer outliers remain.
  new_h = pdb_h.deep_copy()
  xray_structure = new_h.extract_xray_structure()
  rm.set_rotamer_to_reference(
      xray_structure=xray_structure)
  new_h.adopt_xray_structure(xray_structure)
  r1 = rotalyze(pdb_hierarchy=new_h, outliers_only=False)
  assert r1.n_outliers == 0
  # Clean up the temporary reference file (matches the other exercises).
  os.remove("ref.pdb")
def exercise_dna(mon_lib_srv, ener_lib):
  """Exercise reference_model on a DNA model (two chains, A and B).

  The model acts as its own reference.  The test is run twice: once with
  explicit per-chain reference_group definitions and once without any
  groups (chains matched automatically); both runs must produce 74
  proxies and the expected per-residue pairings in the summary log.

  Note: mon_lib_srv / ener_lib are accepted for signature consistency with
  the other exercise_* functions in this file.
  """
  pdb_str_original = """\
CRYST1   25.287   40.217   65.471  90.00  90.00  90.00 P 21 21 21    8
SCALE1      0.039546  0.000000  0.000000        0.00000
SCALE2      0.000000  0.024865  0.000000        0.00000
SCALE3      0.000000  0.000000  0.015274        0.00000
ATOM     80  P    DA A   5      -8.062  -5.965 -15.755  1.00 42.17           P
ATOM     81  OP1  DA A   5      -8.426  -7.228 -16.405  1.00 50.61           O
ATOM     82  OP2  DA A   5      -8.689  -5.557 -14.457  1.00 51.75           O
ATOM     83  O5'  DA A   5      -6.496  -5.961 -15.638  1.00 34.89           O
ATOM     84  C5'  DA A   5      -5.791  -6.321 -16.790  1.00 30.71           C
ATOM     85  C4'  DA A   5      -4.355  -5.917 -16.600  1.00 34.43           C
ATOM     86  O4'  DA A   5      -4.303  -4.509 -16.239  1.00 33.96           O
ATOM     87  C3'  DA A   5      -3.630  -6.687 -15.491  1.00 35.56           C
ATOM     88  O3'  DA A   5      -2.407  -7.257 -16.020  1.00 33.08           O
ATOM     89  C2'  DA A   5      -3.531  -5.654 -14.384  1.00 32.41           C
ATOM     90  C1'  DA A   5      -3.435  -4.334 -15.130  1.00 28.44           C
ATOM     91  N9   DA A   5      -3.904  -3.143 -14.449  1.00 28.37           N
ATOM     92  C8   DA A   5      -5.187  -2.933 -14.022  1.00 27.53           C
ATOM     93  N7   DA A   5      -5.401  -1.724 -13.565  1.00 29.33           N
ATOM     94  C5   DA A   5      -4.187  -1.082 -13.747  1.00 23.78           C
ATOM     95  C6   DA A   5      -3.761   0.226 -13.474  1.00 25.22           C
ATOM     96  N6   DA A   5      -4.519   1.150 -12.896  1.00 25.69           N
ATOM     97  N1   DA A   5      -2.485   0.535 -13.749  1.00 24.39           N
ATOM     98  C2   DA A   5      -1.712  -0.389 -14.320  1.00 24.89           C
ATOM     99  N3   DA A   5      -2.001  -1.641 -14.653  1.00 28.33           N
ATOM    100  C4   DA A   5      -3.268  -1.935 -14.326  1.00 27.45           C
ATOM    101  P    DA A   6      -1.382  -8.057 -15.083  1.00 33.49           P
ATOM    102  OP1  DA A   6      -0.596  -8.971 -15.989  1.00 35.26           O
ATOM    103  OP2  DA A   6      -2.097  -8.481 -13.890  1.00 34.48           O
ATOM    104  O5'  DA A   6      -0.480  -6.949 -14.401  1.00 31.72           O
ATOM    105  C5'  DA A   6       0.398  -6.138 -15.188  1.00 28.12           C
ATOM    106  C4'  DA A   6       1.219  -5.272 -14.269  1.00 22.57           C
ATOM    107  O4'  DA A   6       0.380  -4.203 -13.784  1.00 23.34           O
ATOM    108  C3'  DA A   6       1.783  -5.982 -13.049  1.00 23.61           C
ATOM    109  O3'  DA A   6       3.202  -5.785 -13.150  1.00 22.60           O
ATOM    110  C2'  DA A   6       1.110  -5.289 -11.881  1.00 22.21           C
ATOM    111  C1'  DA A   6       0.653  -3.958 -12.418  1.00 20.89           C
ATOM    112  N9   DA A   6      -0.561  -3.398 -11.831  1.00 21.71           N
ATOM    113  C8   DA A   6      -1.777  -4.017 -11.666  1.00 23.62           C
ATOM    114  N7   DA A   6      -2.693  -3.249 -11.139  1.00 23.57           N
ATOM    115  C5   DA A   6      -2.071  -2.016 -11.029  1.00 20.29           C
ATOM    116  C6   DA A   6      -2.506  -0.774 -10.519  1.00 20.33           C
ATOM    117  N6   DA A   6      -3.763  -0.525 -10.122  1.00 20.36           N
ATOM    118  N1   DA A   6      -1.604   0.233 -10.486  1.00 20.84           N
ATOM    119  C2   DA A   6      -0.341  -0.023 -10.868  1.00 21.15           C
ATOM    120  N3   DA A   6       0.174  -1.126 -11.378  1.00 22.91           N
ATOM    121  C4   DA A   6      -0.746  -2.101 -11.433  1.00 20.00           C
ATOM    122  P    DT A   7       4.283  -6.215 -12.051  1.00 23.53           P
ATOM    123  OP1  DT A   7       5.598  -6.398 -12.780  1.00 27.73           O
ATOM    124  OP2  DT A   7       3.774  -7.297 -11.205  1.00 24.18           O
ATOM    125  O5'  DT A   7       4.350  -4.948 -11.106  1.00 22.94           O
ATOM    126  C5'  DT A   7       4.668  -3.709 -11.633  1.00 21.30           C
ATOM    127  C4'  DT A   7       4.525  -2.656 -10.580  1.00 20.84           C
ATOM    128  O4'  DT A   7       3.138  -2.512 -10.296  1.00 19.94           O
ATOM    129  C3'  DT A   7       5.205  -2.966  -9.250  1.00 20.02           C
ATOM    130  O3'  DT A   7       6.280  -2.035  -9.099  1.00 23.74           O
ATOM    131  C2'  DT A   7       4.144  -2.717  -8.200  1.00 19.47           C
ATOM    132  C1'  DT A   7       3.048  -2.015  -8.962  1.00 20.12           C
ATOM    133  N1   DT A   7       1.641  -2.197  -8.524  1.00 20.27           N
ATOM    134  C2   DT A   7       0.957  -1.108  -8.030  1.00 18.61           C
ATOM    135  O2   DT A   7       1.430   0.017  -7.926  1.00 19.56           O
ATOM    136  N3   DT A   7      -0.344  -1.365  -7.721  1.00 18.89           N
ATOM    137  C4   DT A   7      -1.018  -2.563  -7.836  1.00 21.94           C
ATOM    138  O4   DT A   7      -2.200  -2.640  -7.497  1.00 23.57           O
ATOM    139  C5   DT A   7      -0.226  -3.674  -8.271  1.00 18.09           C
ATOM    140  C7   DT A   7      -0.860  -5.022  -8.351  1.00 19.35           C
ATOM    141  C6   DT A   7       1.065  -3.446  -8.562  1.00 17.66           C
ATOM    142  P    DT A   8       7.284  -1.980  -7.857  1.00 26.43           P
ATOM    143  OP1  DT A   8       8.611  -1.444  -8.278  1.00 28.45           O
ATOM    144  OP2  DT A   8       7.248  -3.298  -7.198  1.00 27.17           O
ATOM    145  O5'  DT A   8       6.613  -0.927  -6.882  1.00 25.09           O
ATOM    146  C5'  DT A   8       6.357   0.403  -7.340  1.00 24.67           C
ATOM    147  C4'  DT A   8       5.543   1.125  -6.301  1.00 23.10           C
ATOM    148  O4'  DT A   8       4.228   0.541  -6.229  1.00 23.60           O
ATOM    149  C3'  DT A   8       6.127   1.057  -4.884  1.00 25.21           C
ATOM    150  O3'  DT A   8       6.507   2.380  -4.493  1.00 28.93           O
ATOM    151  C2'  DT A   8       5.018   0.434  -4.050  1.00 23.32           C
ATOM    152  C1'  DT A   8       3.795   0.667  -4.883  1.00 22.06           C
ATOM    153  N1   DT A   8       2.713  -0.291  -4.689  1.00 19.79           N
ATOM    154  C2   DT A   8       1.466   0.223  -4.414  1.00 18.40           C
ATOM    155  O2   DT A   8       1.263   1.399  -4.157  1.00 20.56           O
ATOM    156  N3   DT A   8       0.484  -0.716  -4.337  1.00 19.20           N
ATOM    157  C4   DT A   8       0.588  -2.075  -4.597  1.00 18.45           C
ATOM    158  O4   DT A   8      -0.397  -2.789  -4.538  1.00 21.38           O
ATOM    159  C5   DT A   8       1.920  -2.549  -4.859  1.00 17.02           C
ATOM    160  C7   DT A   8       2.126  -4.006  -5.116  1.00 20.50           C
ATOM    161  C6   DT A   8       2.895  -1.634  -4.959  1.00 19.29           C
TER     245       DG A  12
ATOM    325  P    DA B  17     -10.220   1.260  -1.207  1.00 27.94           P
ATOM    326  OP1  DA B  17     -11.370   2.143  -0.856  1.00 34.83           O
ATOM    327  OP2  DA B  17     -10.221   0.599  -2.553  1.00 31.17           O
ATOM    328  O5'  DA B  17      -8.842   2.020  -1.098  1.00 26.12           O
ATOM    329  C5'  DA B  17      -8.558   2.683   0.094  1.00 25.41           C
ATOM    330  C4'  DA B  17      -7.407   3.619  -0.107  1.00 26.38           C
ATOM    331  O4'  DA B  17      -6.208   2.886  -0.440  1.00 24.41           O
ATOM    332  C3'  DA B  17      -7.600   4.631  -1.214  1.00 27.57           C
ATOM    333  O3'  DA B  17      -6.972   5.834  -0.764  1.00 29.89           O
ATOM    334  C2'  DA B  17      -6.902   3.980  -2.406  1.00 26.29           C
ATOM    335  C1'  DA B  17      -5.771   3.225  -1.781  1.00 23.13           C
ATOM    336  N9   DA B  17      -5.444   1.986  -2.460  1.00 22.66           N
ATOM    337  C8   DA B  17      -6.295   0.942  -2.750  1.00 23.38           C
ATOM    338  N7   DA B  17      -5.700  -0.094  -3.288  1.00 20.62           N
ATOM    339  C5   DA B  17      -4.344   0.242  -3.234  1.00 20.59           C
ATOM    340  C6   DA B  17      -3.178  -0.447  -3.603  1.00 17.89           C
ATOM    341  N6   DA B  17      -3.184  -1.685  -4.072  1.00 20.22           N
ATOM    342  N1   DA B  17      -1.995   0.205  -3.497  1.00 19.61           N
ATOM    343  C2   DA B  17      -1.992   1.465  -3.030  1.00 20.38           C
ATOM    344  N3   DA B  17      -3.021   2.207  -2.621  1.00 20.80           N
ATOM    345  C4   DA B  17      -4.182   1.540  -2.774  1.00 19.17           C
ATOM    346  P    DA B  18      -6.994   7.132  -1.670  1.00 32.91           P
ATOM    347  OP1  DA B  18      -6.817   8.281  -0.798  1.00 37.55           O
ATOM    348  OP2  DA B  18      -8.060   7.037  -2.636  1.00 31.04           O
ATOM    349  O5'  DA B  18      -5.659   7.052  -2.535  1.00 30.20           O
ATOM    350  C5'  DA B  18      -4.377   7.074  -1.958  1.00 30.19           C
ATOM    351  C4'  DA B  18      -3.354   6.838  -3.036  1.00 28.09           C
ATOM    352  O4'  DA B  18      -3.424   5.481  -3.484  1.00 26.27           O
ATOM    353  C3'  DA B  18      -3.545   7.708  -4.286  1.00 29.73           C
ATOM    354  O3'  DA B  18      -2.469   8.627  -4.273  1.00 34.73           O
ATOM    355  C2'  DA B  18      -3.566   6.715  -5.433  1.00 27.32           C
ATOM    356  C1'  DA B  18      -3.010   5.448  -4.841  1.00 24.83           C
ATOM    357  N9   DA B  18      -3.488   4.196  -5.410  1.00 23.72           N
ATOM    358  C8   DA B  18      -4.794   3.799  -5.530  1.00 20.51           C
ATOM    359  N7   DA B  18      -4.937   2.581  -5.985  1.00 22.85           N
ATOM    360  C5   DA B  18      -3.636   2.147  -6.189  1.00 20.87           C
ATOM    361  C6   DA B  18      -3.111   0.950  -6.675  1.00 19.34           C
ATOM    362  N6   DA B  18      -3.852  -0.099  -7.028  1.00 21.88           N
ATOM    363  N1   DA B  18      -1.767   0.849  -6.776  1.00 19.77           N
ATOM    364  C2   DA B  18      -1.023   1.872  -6.374  1.00 21.42           C
ATOM    365  N3   DA B  18      -1.392   3.050  -5.910  1.00 22.19           N
ATOM    366  C4   DA B  18      -2.734   3.129  -5.836  1.00 21.41           C
ATOM    367  P    DT B  19      -2.064   9.546  -5.497  1.00 40.82           P
ATOM    368  OP1  DT B  19      -1.281  10.615  -4.939  1.00 44.52           O
ATOM    369  OP2  DT B  19      -3.292   9.787  -6.271  1.00 44.69           O
ATOM    370  O5'  DT B  19      -1.119   8.619  -6.355  1.00 30.72           O
ATOM    371  C5'  DT B  19       0.059   8.093  -5.804  1.00 29.16           C
ATOM    372  C4'  DT B  19       0.704   7.195  -6.832  1.00 26.15           C
ATOM    373  O4'  DT B  19      -0.129   6.045  -7.087  1.00 26.00           O
ATOM    374  C3'  DT B  19       0.941   7.859  -8.188  1.00 25.98           C
ATOM    375  O3'  DT B  19       2.343   7.877  -8.376  1.00 30.07           O
ATOM    376  C2'  DT B  19       0.207   6.968  -9.181  1.00 26.77           C
ATOM    377  C1'  DT B  19       0.036   5.665  -8.443  1.00 25.87           C
ATOM    378  N1   DT B  19      -1.122   4.839  -8.816  1.00 24.60           N
ATOM    379  C2   DT B  19      -0.906   3.556  -9.283  1.00 22.21           C
ATOM    380  O2   DT B  19       0.197   3.084  -9.451  1.00 22.06           O
ATOM    381  N3   DT B  19      -2.038   2.833  -9.519  1.00 22.04           N
ATOM    382  C4   DT B  19      -3.339   3.262  -9.380  1.00 21.81           C
ATOM    383  O4   DT B  19      -4.247   2.495  -9.615  1.00 24.16           O
ATOM    384  C5   DT B  19      -3.499   4.613  -8.891  1.00 22.25           C
ATOM    385  C7   DT B  19      -4.879   5.143  -8.663  1.00 23.26           C
ATOM    386  C6   DT B  19      -2.396   5.327  -8.640  1.00 22.85           C
ATOM    387  P    DT B  20       3.005   8.456  -9.725  1.00 32.03           P
ATOM    388  OP1  DT B  20       4.339   8.958  -9.284  1.00 35.31           O
ATOM    389  OP2  DT B  20       2.027   9.351 -10.442  1.00 33.99           O
ATOM    390  O5'  DT B  20       3.144   7.102 -10.543  1.00 31.33           O
ATOM    391  C5'  DT B  20       3.894   5.979 -10.032  1.00 28.60           C
ATOM    392  C4'  DT B  20       3.851   4.840 -11.020  1.00 28.63           C
ATOM    393  O4'  DT B  20       2.494   4.361 -11.145  1.00 26.47           O
ATOM    394  C3'  DT B  20       4.300   5.211 -12.437  1.00 31.59           C
ATOM    395  O3'  DT B  20       5.260   4.256 -12.875  1.00 39.07           O
ATOM    396  C2'  DT B  20       3.027   5.147 -13.257  1.00 26.06           C
ATOM    397  C1'  DT B  20       2.211   4.120 -12.529  1.00 24.42           C
ATOM    398  N1   DT B  20       0.757   4.123 -12.660  1.00 23.79           N
ATOM    399  C2   DT B  20       0.138   2.932 -12.972  1.00 25.04           C
ATOM    400  O2   DT B  20       0.741   1.921 -13.262  1.00 24.66           O
ATOM    401  N3   DT B  20      -1.229   2.977 -12.959  1.00 25.84           N
ATOM    402  C4   DT B  20      -2.022   4.071 -12.671  1.00 25.98           C
ATOM    403  O4   DT B  20      -3.234   3.948 -12.646  1.00 28.14           O
ATOM    404  C5   DT B  20      -1.311   5.298 -12.387  1.00 22.81           C
ATOM    405  C7   DT B  20      -2.094   6.540 -12.092  1.00 27.47           C
ATOM    406  C6   DT B  20       0.028   5.263 -12.401  1.00 26.29           C
TER     490       DG B  24
"""
  # Explicit chain-to-chain mapping (A->A, B->B) onto the same file.
  params_text = """\
reference_model {
  reference_group {
    reference = chain 'A'
    selection = chain 'A'
    file_name = "ref.pdb"
  }
  reference_group {
    reference = chain 'B'
    selection = chain 'B'
    file_name = "ref.pdb"
  }
}
"""
  # The model is its own reference; 'with' guarantees the handle is
  # closed even if the write raises.
  with open("ref.pdb", 'w') as ref_file:
    ref_file.write(pdb_str_original)
  log = cStringIO.StringIO()
  # log = sys.stdout
  model = mmtbx.model.manager(
      model_input = iotbx.pdb.input(lines=flex.split_lines(pdb_str_original),
        source_info=None),
      process_input=True)
  for include_chains in [True, False]:
    def_pars = reference_model_params
    pars = iotbx.phil.parse(params_text)
    if include_chains:
      # Explicit per-chain reference_group definitions.
      all_pars = def_pars.fetch(pars).extract()
    else:
      # No groups supplied: chains should be matched automatically.
      all_pars = def_pars.extract()
      all_pars.reference_model.file = "ref.pdb"
    all_pars.reference_model.enabled = True
    rm = reference_model(
        model=model,
        reference_file_list=['ref.pdb'],
        params=all_pars.reference_model,
        log=log)
    rm.show_reference_summary(log=log)
    assert rm.get_n_proxies() == 74, \
        "Expecting 74 proxies, got %d" % rm.get_n_proxies()
    log_strings = log.getvalue().split("\n")
    for needed_string in [
        "  DA A   5  <=====>   DA A   5",
        "  DA A   6  <=====>   DA A   6",
        "  DT A   7  <=====>   DT A   7",
        "  DT A   8  <=====>   DT A   8",
        "  DA B  17  <=====>   DA B  17",
        "  DA B  18  <=====>   DA B  18",
        "  DT B  19  <=====>   DT B  19",
        "  DT B  20  <=====>   DT B  20",
        ]:
      assert needed_string in log_strings, "'%s' not in log!" % needed_string
  # Clean up the temporary reference file (matches the other exercises).
  os.remove("ref.pdb")
def exercise_3chains_self(mon_lib_srv, ener_lib):
"""
Test reference model, 3 chains, reference is the same and selections are
supposed to be mixed, like ref=(A or Bref or C) sel=(Aref, B, Cref) """
pdb_str_original = """\
CRYST1 129.069 83.165 84.393 90.00 90.00 90.00 P 1
ATOM 1 N GLN A 1 118.638 78.165 29.859 1.00 32.70 A N
ATOM 2 CA GLN A 1 118.742 77.022 30.759 1.00 34.10 A C
ATOM 3 CB GLN A 1 117.844 77.222 31.984 1.00 34.85 A C
ATOM 4 CG GLN A 1 118.008 76.159 33.064 1.00 36.31 A C
ATOM 5 CD GLN A 1 117.167 76.441 34.295 1.00 37.02 A C
ATOM 6 OE1 GLN A 1 116.477 77.458 34.372 1.00 36.35 A O
ATOM 7 NE2 GLN A 1 117.221 75.538 35.268 1.00 38.45 A N
ATOM 8 C GLN A 1 118.377 75.725 30.039 1.00 35.48 A C
ATOM 9 O GLN A 1 119.251 75.008 29.552 1.00 35.59 A O
ATOM 10 N VAL A 2 117.083 75.432 29.969 1.00 36.49 A N
ATOM 11 CA VAL A 2 116.607 74.213 29.327 1.00 37.63 A C
ATOM 12 CB VAL A 2 115.168 73.893 29.751 1.00 38.86 A C
ATOM 13 CG1 VAL A 2 114.654 72.672 29.006 1.00 39.72 A C
ATOM 14 CG2 VAL A 2 115.096 73.682 31.255 1.00 39.93 A C
ATOM 15 C VAL A 2 116.698 74.327 27.811 1.00 36.86 A C
ATOM 16 O VAL A 2 116.042 75.175 27.207 1.00 36.25 A O
ATOM 17 N GLN A 3 117.506 73.466 27.200 1.00 37.05 A N
ATOM 18 CA GLN A 3 117.678 73.479 25.752 1.00 36.50 A C
ATOM 19 CB GLN A 3 118.915 74.294 25.365 1.00 35.23 A C
ATOM 20 CG GLN A 3 118.741 75.798 25.481 1.00 33.97 A C
ATOM 21 CD GLN A 3 119.955 76.561 24.994 1.00 32.60 A C
ATOM 22 OE1 GLN A 3 120.999 75.974 24.707 1.00 32.65 A O
ATOM 23 NE2 GLN A 3 119.823 77.879 24.893 1.00 31.40 A N
ATOM 24 C GLN A 3 117.794 72.072 25.176 1.00 37.49 A C
ATOM 25 O GLN A 3 118.315 71.161 25.827 1.00 38.37 A O
ATOM 26 N LEU A 4 117.302 71.907 23.951 1.00 37.44 A N
ATOM 27 CA LEU A 4 117.424 70.652 23.217 1.00 38.27 A C
ATOM 28 CB LEU A 4 116.107 69.872 23.227 1.00 39.05 A C
ATOM 29 CG LEU A 4 115.539 69.386 24.562 1.00 39.82 A C
ATOM 30 CD1 LEU A 4 114.720 70.467 25.254 1.00 39.45 A C
ATOM 31 CD2 LEU A 4 114.704 68.133 24.355 1.00 40.68 A C
ATOM 32 C LEU A 4 117.854 70.938 21.778 1.00 37.77 A C
ATOM 33 O LEU A 4 117.369 71.884 21.157 1.00 37.07 A O
ATOM 34 N LYS A 5 118.763 70.124 21.249 1.00 38.27 A N
ATOM 35 CA LYS A 5 119.265 70.333 19.895 1.00 37.96 A C
ATOM 36 CB LYS A 5 120.574 71.123 19.931 1.00 37.03 A C
ATOM 37 CG LYS A 5 121.114 71.505 18.561 1.00 36.53 A C
ATOM 38 CD LYS A 5 122.352 72.380 18.680 1.00 35.38 A C
ATOM 39 CE LYS A 5 122.875 72.783 17.311 1.00 34.77 A C
ATOM 40 NZ LYS A 5 124.069 73.666 17.413 1.00 33.45 A N
ATOM 41 C LYS A 5 119.467 69.009 19.161 1.00 39.11 A C
ATOM 42 O LYS A 5 120.063 68.075 19.695 1.00 39.98 A O
ATOM 43 N GLU A 6 118.969 68.936 17.931 1.00 39.24 A N
ATOM 44 CA GLU A 6 119.059 67.716 17.133 1.00 40.35 A C
ATOM 45 CB GLU A 6 117.809 67.541 16.267 1.00 40.77 A C
ATOM 46 CG GLU A 6 116.518 67.353 17.046 1.00 40.94 A C
ATOM 47 CD GLU A 6 115.905 68.664 17.505 1.00 39.97 A C
ATOM 48 OE1 GLU A 6 116.574 69.714 17.400 1.00 39.09 A O
ATOM 49 OE2 GLU A 6 114.743 68.644 17.962 1.00 40.09 A O
ATOM 50 C GLU A 6 120.296 67.709 16.241 1.00 40.24 A C
ATOM 51 O GLU A 6 120.601 68.697 15.574 1.00 39.29 A O
ATOM 52 N SER A 7 121.001 66.584 16.234 1.00 41.20 A N
ATOM 53 CA SER A 7 122.160 66.398 15.370 1.00 41.10 A C
ATOM 54 CB SER A 7 123.431 66.187 16.197 1.00 41.10 A C
ATOM 55 OG SER A 7 123.701 67.308 17.023 1.00 40.11 A O
ATOM 56 C SER A 7 121.930 65.211 14.444 1.00 42.23 A C
ATOM 57 O SER A 7 121.956 64.061 14.881 1.00 43.35 A O
ATOM 58 N GLY A 8 121.697 65.499 13.167 1.00 42.03 A N
ATOM 59 CA GLY A 8 121.422 64.464 12.188 1.00 43.17 A C
ATOM 60 C GLY A 8 122.302 64.540 10.956 1.00 42.71 A C
ATOM 61 O GLY A 8 123.142 65.433 10.843 1.00 41.37 A O
ATOM 62 N PRO A 9 122.117 63.592 10.024 1.00 43.74 A N
ATOM 63 CD PRO A 9 121.218 62.434 10.175 1.00 45.40 A C
ATOM 64 CA PRO A 9 122.906 63.513 8.790 1.00 43.28 A C
ATOM 65 CB PRO A 9 122.804 62.037 8.414 1.00 44.75 A C
ATOM 66 CG PRO A 9 121.465 61.631 8.919 1.00 46.12 A C
ATOM 67 C PRO A 9 122.361 64.390 7.666 1.00 42.79 A C
ATOM 68 O PRO A 9 123.122 64.826 6.800 1.00 41.70 A O
ATOM 69 N GLY A 10 121.055 64.637 7.679 1.00 43.56 A N
ATOM 70 CA GLY A 10 120.425 65.454 6.659 1.00 43.32 A C
ATOM 71 C GLY A 10 119.971 64.668 5.445 1.00 44.47 A C
ATOM 72 O GLY A 10 118.831 64.801 5.000 1.00 45.36 A O
TER
ATOM 1645 N ASP B 1 94.462 51.713 21.314 1.00 38.68 B N
ATOM 1646 CA ASP B 1 94.907 52.727 20.365 1.00 39.77 B C
ATOM 1647 CB ASP B 1 94.995 52.139 18.956 1.00 40.01 B C
ATOM 1648 CG ASP B 1 95.754 53.035 18.000 1.00 41.28 B C
ATOM 1649 OD1 ASP B 1 96.565 53.857 18.476 1.00 41.67 B O
ATOM 1650 OD2 ASP B 1 95.544 52.913 16.774 1.00 41.91 B O
ATOM 1651 C ASP B 1 93.966 53.928 20.386 1.00 40.53 B C
ATOM 1652 O ASP B 1 92.746 53.766 20.440 1.00 40.33 B O
ATOM 1653 N ILE B 2 94.537 55.130 20.341 1.00 41.47 B N
ATOM 1654 CA ILE B 2 93.756 56.358 20.452 1.00 42.47 B C
ATOM 1655 CB ILE B 2 94.214 57.190 21.663 1.00 42.52 B C
ATOM 1656 CG2 ILE B 2 93.396 58.470 21.778 1.00 43.79 B C
ATOM 1657 CG1 ILE B 2 94.108 56.362 22.947 1.00 41.45 B C
ATOM 1658 CD1 ILE B 2 94.570 57.093 24.191 1.00 41.67 B C
ATOM 1659 C ILE B 2 93.841 57.180 19.168 1.00 43.93 B C
ATOM 1660 O ILE B 2 94.928 57.405 18.636 1.00 44.25 B O
ATOM 1661 N VAL B 3 92.687 57.629 18.680 1.00 45.00 B N
ATOM 1662 CA VAL B 3 92.612 58.382 17.432 1.00 46.83 B C
ATOM 1663 CB VAL B 3 91.712 57.671 16.414 1.00 47.07 B C
ATOM 1664 CG1 VAL B 3 91.747 58.390 15.074 1.00 49.24 B C
ATOM 1665 CG2 VAL B 3 92.136 56.224 16.262 1.00 45.38 B C
ATOM 1666 C VAL B 3 92.111 59.799 17.681 1.00 48.52 B C
ATOM 1667 O VAL B 3 91.117 60.002 18.375 1.00 48.59 B O
ATOM 1668 N MET B 4 92.799 60.776 17.099 1.00 49.99 B N
ATOM 1669 CA MET B 4 92.460 62.181 17.296 1.00 51.83 B C
ATOM 1670 CB MET B 4 93.658 62.944 17.866 1.00 51.35 B C
ATOM 1671 CG MET B 4 94.339 62.254 19.040 1.00 48.92 B C
ATOM 1672 SD MET B 4 93.360 62.268 20.552 1.00 48.39 B S
ATOM 1673 CE MET B 4 93.448 63.998 20.987 1.00 49.97 B C
ATOM 1674 C MET B 4 92.005 62.828 15.992 1.00 54.57 B C
ATOM 1675 O MET B 4 92.701 62.758 14.979 1.00 55.32 B O
ATOM 1676 N SER B 5 90.836 63.461 16.023 1.00 56.25 B N
ATOM 1677 CA SER B 5 90.302 64.140 14.847 1.00 59.25 B C
ATOM 1678 CB SER B 5 89.052 63.420 14.334 1.00 59.12 B C
ATOM 1679 OG SER B 5 89.335 62.070 14.010 1.00 56.93 B O
ATOM 1680 C SER B 5 89.975 65.596 15.161 1.00 61.44 B C
ATOM 1681 O SER B 5 89.374 65.889 16.191 1.00 60.75 B O
ATOM 1682 N GLN B 6 90.363 66.508 14.274 1.00 64.02 B N
ATOM 1683 CA GLN B 6 90.108 67.930 14.492 1.00 65.54 B C
ATOM 1684 CB GLN B 6 91.422 68.711 14.520 1.00 64.95 B C
ATOM 1685 CG GLN B 6 92.351 68.313 15.648 1.00 62.00 B C
ATOM 1686 CD GLN B 6 93.557 69.221 15.762 1.00 61.07 B C
ATOM 1687 OE1 GLN B 6 94.648 68.778 16.125 1.00 58.90 B O
ATOM 1688 NE2 GLN B 6 93.368 70.500 15.458 1.00 62.20 B N
ATOM 1689 C GLN B 6 89.182 68.526 13.435 1.00 68.28 B C
ATOM 1690 O GLN B 6 89.240 68.157 12.261 1.00 69.90 B O
ATOM 1691 N SER B 7 88.332 69.456 13.861 1.00 68.57 B N
ATOM 1692 CA SER B 7 87.413 70.129 12.949 1.00 70.24 B C
ATOM 1693 CB SER B 7 86.049 69.433 12.944 1.00 69.94 B C
ATOM 1694 OG SER B 7 86.154 68.099 12.477 1.00 69.41 B O
ATOM 1695 C SER B 7 87.253 71.595 13.333 1.00 69.83 B C
ATOM 1696 O SER B 7 87.048 71.909 14.503 1.00 68.66 B O
ATOM 1697 N PRO B 8 87.340 72.500 12.345 1.00 70.37 B N
ATOM 1698 CD PRO B 8 87.103 73.940 12.555 1.00 69.33 B C
ATOM 1699 CA PRO B 8 87.579 72.191 10.932 1.00 71.70 B C
ATOM 1700 CB PRO B 8 86.996 73.407 10.217 1.00 71.69 B C
ATOM 1701 CG PRO B 8 87.247 74.525 11.170 1.00 69.92 B C
ATOM 1702 C PRO B 8 89.059 72.030 10.600 1.00 71.13 B C
ATOM 1703 O PRO B 8 89.910 72.310 11.444 1.00 69.82 B O
ATOM 1704 N SER B 9 89.354 71.585 9.382 1.00 71.87 B N
ATOM 1705 CA SER B 9 90.732 71.404 8.940 1.00 70.91 B C
ATOM 1706 CB SER B 9 90.775 70.616 7.629 1.00 71.45 B C
ATOM 1707 OG SER B 9 89.977 71.234 6.633 1.00 71.72 B O
ATOM 1708 C SER B 9 91.432 72.749 8.770 1.00 69.14 B C
ATOM 1709 O SER B 9 92.628 72.878 9.037 1.00 67.60 B O
ATOM 1710 N SER B 10 90.677 73.747 8.324 1.00 69.19 B N
ATOM 1711 CA SER B 10 91.201 75.095 8.150 1.00 67.65 B C
ATOM 1712 CB SER B 10 92.017 75.200 6.860 1.00 66.83 B C
ATOM 1713 OG SER B 10 91.215 74.927 5.723 1.00 67.75 B O
ATOM 1714 C SER B 10 90.055 76.097 8.134 1.00 68.08 B C
ATOM 1715 O SER B 10 88.958 75.786 7.670 1.00 69.49 B O
TER
ATOM 3353 N GLN C 1 27.855 6.390 79.393 1.00 55.82 C N
ATOM 3354 CA GLN C 1 27.377 6.759 78.009 1.00 57.48 C C
ATOM 3355 CB GLN C 1 26.126 5.903 77.650 1.00 57.65 C C
ATOM 3356 CG GLN C 1 24.762 6.447 78.162 1.00 55.80 C C
ATOM 3357 CD GLN C 1 23.623 5.432 77.999 1.00 55.35 C C
ATOM 3358 OE1 GLN C 1 22.972 5.032 78.969 1.00 53.50 C O
ATOM 3359 NE2 GLN C 1 23.365 5.000 76.745 1.00 56.64 C N
ATOM 3360 C GLN C 1 27.097 8.250 77.886 1.00 56.46 C C
ATOM 3361 O GLN C 1 26.949 8.930 78.891 1.00 54.64 C O
ATOM 3362 N VAL C 2 27.019 8.808 76.660 1.00 57.14 C N
ATOM 3363 CA VAL C 2 26.719 10.217 76.428 1.00 55.41 C C
ATOM 3364 CB VAL C 2 27.931 10.987 75.916 1.00 55.45 C C
ATOM 3365 CG1 VAL C 2 27.552 12.336 75.269 1.00 53.41 C C
ATOM 3366 CG2 VAL C 2 28.864 11.254 77.110 1.00 54.84 C C
ATOM 3367 C VAL C 2 25.580 10.337 75.447 1.00 54.98 C C
ATOM 3368 O VAL C 2 25.617 9.750 74.367 1.00 56.38 C O
ATOM 3369 N GLN C 3 24.516 11.078 75.796 1.00 52.87 C N
ATOM 3370 CA GLN C 3 23.345 11.230 74.957 1.00 51.91 C C
ATOM 3371 CB GLN C 3 22.235 10.208 75.319 1.00 52.11 C C
ATOM 3372 CG GLN C 3 22.651 8.728 75.133 1.00 54.78 C C
ATOM 3373 CD GLN C 3 21.498 7.771 75.463 1.00 54.32 C C
ATOM 3374 OE1 GLN C 3 20.976 7.743 76.584 1.00 52.76 C O
ATOM 3375 NE2 GLN C 3 21.093 6.936 74.478 1.00 54.60 C N
ATOM 3376 C GLN C 3 22.755 12.621 75.095 1.00 49.15 C C
ATOM 3377 O GLN C 3 22.938 13.298 76.106 1.00 48.04 C O
ATOM 3378 N LEU C 4 22.017 13.068 74.065 1.00 47.90 C N
ATOM 3379 CA LEU C 4 21.268 14.320 74.040 1.00 45.28 C C
ATOM 3380 CB LEU C 4 21.893 15.309 73.052 1.00 44.69 C C
ATOM 3381 CG LEU C 4 23.323 15.776 73.331 1.00 45.36 C C
ATOM 3382 CD1 LEU C 4 23.813 16.685 72.216 1.00 44.60 C C
ATOM 3383 CD2 LEU C 4 23.407 16.483 74.670 1.00 44.38 C C
ATOM 3384 C LEU C 4 19.813 14.054 73.665 1.00 43.91 C C
ATOM 3385 O LEU C 4 19.518 13.665 72.535 1.00 44.12 C O
ATOM 3386 N GLN C 5 18.907 14.263 74.615 1.00 42.26 C N
ATOM 3387 CA GLN C 5 17.488 14.011 74.382 1.00 40.51 C C
ATOM 3388 CB GLN C 5 16.852 13.347 75.606 1.00 39.88 C C
ATOM 3389 CG GLN C 5 17.529 12.057 76.033 1.00 42.29 C C
ATOM 3390 CD GLN C 5 17.525 11.010 74.938 1.00 43.78 C C
ATOM 3391 OE1 GLN C 5 18.566 10.448 74.598 1.00 46.37 C O
ATOM 3392 NE2 GLN C 5 16.350 10.738 74.382 1.00 42.03 C N
ATOM 3393 C GLN C 5 16.760 15.304 74.047 1.00 38.05 C C
ATOM 3394 O GLN C 5 16.865 16.282 74.776 1.00 37.00 C O
ATOM 3395 N GLN C 6 16.020 15.312 72.945 1.00 37.12 C N
ATOM 3396 CA GLN C 6 15.338 16.529 72.520 1.00 34.96 C C
ATOM 3397 CB GLN C 6 15.647 16.828 71.051 1.00 35.40 C C
ATOM 3398 CG GLN C 6 17.121 17.076 70.774 1.00 37.30 C C
ATOM 3399 CD GLN C 6 17.370 17.631 69.387 1.00 37.26 C C
ATOM 3400 OE1 GLN C 6 18.353 17.285 68.732 1.00 38.80 C O
ATOM 3401 NE2 GLN C 6 16.478 18.503 68.932 1.00 35.44 C N
ATOM 3402 C GLN C 6 13.830 16.452 72.732 1.00 32.51 C C
ATOM 3403 O GLN C 6 13.230 15.380 72.646 1.00 32.39 C O
ATOM 3404 N SER C 7 13.227 17.604 73.013 1.00 30.41 C N
ATOM 3405 CA SER C 7 11.789 17.692 73.226 1.00 27.70 C C
ATOM 3406 CB SER C 7 11.431 19.008 73.916 1.00 25.77 C C
ATOM 3407 OG SER C 7 11.822 20.115 73.122 1.00 25.94 C O
ATOM 3408 C SER C 7 11.031 17.575 71.909 1.00 26.72 C C
ATOM 3409 O SER C 7 11.633 17.430 70.846 1.00 28.21 C O
ATOM 3410 N GLY C 8 9.707 17.636 71.984 1.00 24.02 C N
ATOM 3411 CA GLY C 8 8.882 17.597 70.792 1.00 22.64 C C
ATOM 3412 C GLY C 8 8.215 16.260 70.529 1.00 22.13 C C
ATOM 3413 O GLY C 8 8.319 15.342 71.342 1.00 22.76 C O
ATOM 3414 N PRO C 9 7.522 16.141 69.385 1.00 20.87 C N
ATOM 3415 CD PRO C 9 6.878 14.882 68.970 1.00 20.19 C C
ATOM 3416 CA PRO C 9 7.356 17.200 68.381 1.00 20.00 C C
ATOM 3417 CB PRO C 9 6.793 16.446 67.173 1.00 19.29 C C
ATOM 3418 CG PRO C 9 6.076 15.287 67.766 1.00 18.19 C C
ATOM 3419 C PRO C 9 6.401 18.308 68.821 1.00 17.07 C C
ATOM 3420 O PRO C 9 5.485 18.062 69.605 1.00 14.88 C O
ATOM 3421 N GLU C 10 6.626 19.516 68.314 1.00 16.93 C N
ATOM 3422 CA GLU C 10 5.849 20.679 68.723 1.00 14.34 C C
ATOM 3423 CB GLU C 10 6.778 21.768 69.269 1.00 15.71 C C
ATOM 3424 CG GLU C 10 7.498 21.386 70.554 1.00 17.24 C C
ATOM 3425 CD GLU C 10 6.559 21.288 71.742 1.00 14.84 C C
ATOM 3426 OE1 GLU C 10 5.821 22.262 71.999 1.00 12.41 C O
ATOM 3427 OE2 GLU C 10 6.556 20.236 72.416 1.00 15.30 C O
ATOM 3428 C GLU C 10 5.000 21.240 67.583 1.00 12.13 C C
ATOM 3429 O GLU C 10 5.457 21.355 66.443 1.00 13.28 C O
TER
END
"""
model = mmtbx.model.manager(
model_input = iotbx.pdb.input(lines=flex.split_lines(pdb_str_original),
source_info=None),
process_input=True)
pdb_h = model.get_hierarchy()
ref_h = pdb_h.deep_copy()
# pdb_h.atoms().reset_i_seq()
# ref_h.atoms().reset_i_seq()
log = cStringIO.StringIO()
# log = sys.stdout
def_pars = reference_model_params
all_pars = def_pars.fetch().extract()
all_pars.reference_model.use_starting_model_as_reference=True
all_pars.reference_model.enabled = True
rm = reference_model(
model=model,
reference_hierarchy_list=\
[model.get_hierarchy()],
params=all_pars.reference_model,
log=log)
rm.show_reference_summary(log=log)
assert rm.get_n_proxies() == 141, \
"Expecting 141 proxies, got %d" % rm.get_n_proxies()
log_strings = log.getvalue().split("\n")
# print "========"
# print "\n".join(log_strings)
# print "========"
for needed_string in [
"GLY A 8 <=====> GLY A 8",
"PRO A 9 <=====> PRO A 9",
"GLY A 10 <=====> GLY A 10",
"ASP B 1 <=====> ASP B 1",
"ILE B 2 <=====> ILE B 2",
"SER B 10 <=====> SER B 10",
"GLN C 1 <=====> GLN C 1",
"VAL C 2 <=====> VAL C 2",
]:
assert needed_string in log_strings, "'%s' not in log!" % needed_string
def run(args):
t0 = time.time()
import mmtbx.monomer_library
mon_lib_srv = mmtbx.monomer_library.server.server()
ener_lib = mmtbx.monomer_library.server.ener_lib()
exercise_reference_model(args, mon_lib_srv, ener_lib)
exercise_multiple_to_one(args, mon_lib_srv, ener_lib)
exercise_multiple_ncs_groups_found(mon_lib_srv, ener_lib)
exercise_cutted_residue(mon_lib_srv, ener_lib)
exercise_dna(mon_lib_srv, ener_lib)
exercise_3chains_self(mon_lib_srv, ener_lib)
print "OK. Time: %8.3f"%(time.time()-t0)
if (__name__ == "__main__"):
run(args=sys.argv[1:])
| 59.757015
| 82
| 0.507177
| 18,879
| 93,699
| 2.472589
| 0.09296
| 0.057069
| 0.01928
| 0.004799
| 0.410925
| 0.384683
| 0.352935
| 0.340724
| 0.333912
| 0.326392
| 0
| 0.452954
| 0.411093
| 93,699
| 1,567
| 83
| 59.79515
| 0.393005
| 0.008826
| 0
| 0.228495
| 0
| 0.625
| 0.833839
| 0.003506
| 0
| 0
| 0
| 0
| 0.020161
| 0
| null | null | 0
| 0.010081
| null | null | 0.001344
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
22f1a4989cf6171ade618e4181d0d1eda27df344
| 30,697
|
py
|
Python
|
cumulusci/core/dependencies/tests/test_dependencies.py
|
davisagli/CumulusCI
|
fd74c324ad3ff662484b159395c639879011e711
|
[
"BSD-3-Clause"
] | 109
|
2015-01-20T14:28:48.000Z
|
2018-08-31T12:12:39.000Z
|
cumulusci/core/dependencies/tests/test_dependencies.py
|
davisagli/CumulusCI
|
fd74c324ad3ff662484b159395c639879011e711
|
[
"BSD-3-Clause"
] | 365
|
2015-01-07T19:54:25.000Z
|
2018-09-11T15:10:02.000Z
|
cumulusci/core/dependencies/tests/test_dependencies.py
|
davisagli/CumulusCI
|
fd74c324ad3ff662484b159395c639879011e711
|
[
"BSD-3-Clause"
] | 125
|
2015-01-17T16:05:39.000Z
|
2018-09-06T19:05:00.000Z
|
import io
import os
from distutils.version import StrictVersion
from typing import List, Optional, Tuple
from unittest import mock
from zipfile import ZipFile
import pytest
from pydantic import ValidationError
from cumulusci.core.config.org_config import OrgConfig, VersionInfo
from cumulusci.core.config.project_config import BaseProjectConfig
from cumulusci.core.dependencies.dependencies import (
DynamicDependency,
GitHubDynamicDependency,
GitHubDynamicSubfolderDependency,
PackageNamespaceVersionDependency,
PackageVersionIdDependency,
StaticDependency,
UnmanagedGitHubRefDependency,
UnmanagedZipURLDependency,
parse_dependency,
)
from cumulusci.core.dependencies.resolvers import DependencyResolutionStrategy, Resolver
from cumulusci.core.exceptions import DependencyResolutionError
from cumulusci.salesforce_api.package_install import (
DEFAULT_PACKAGE_RETRY_OPTIONS,
PackageInstallOptions,
)
from cumulusci.utils.ziputils import zip_subfolder
class ConcreteDynamicDependency(DynamicDependency):
ref: Optional[str]
resolved: Optional[bool] = False
@property
def is_resolved(self):
return self.resolved
def resolve(
self, context: BaseProjectConfig, strategies: List[DependencyResolutionStrategy]
):
super().resolve(context, strategies)
self.resolved = True
@property
def name(self):
return ""
class MockResolver(Resolver):
def __init__(
self,
resolve_ref: Optional[str] = None,
resolve_dep: Optional[StaticDependency] = None,
):
self.ref = resolve_ref
self.dep = resolve_dep
def can_resolve(self, dep: DynamicDependency, context: BaseProjectConfig) -> bool:
return bool(self.dep and self.ref)
def resolve(
self, dep: DynamicDependency, context: BaseProjectConfig
) -> Tuple[Optional[str], Optional[StaticDependency]]:
return self.ref, self.dep
class MockBadResolver(Resolver):
def can_resolve(self, dep: DynamicDependency, context: BaseProjectConfig) -> bool:
return True
def resolve(
self, dep: DynamicDependency, context: BaseProjectConfig
) -> Tuple[Optional[str], Optional[StaticDependency]]:
raise DependencyResolutionError("Bad resolver")
class TestDynamicDependency:
@mock.patch("cumulusci.core.dependencies.resolvers.get_resolver")
def test_dynamic_dependency(self, get_resolver):
d = ConcreteDynamicDependency()
resolvers = [
MockResolver(),
MockResolver(
"aaaaaaaaaaaaaaaa",
PackageNamespaceVersionDependency(namespace="foo", version="1.0"),
),
]
get_resolver.side_effect = resolvers
d.resolve(
mock.Mock(),
[
DependencyResolutionStrategy.UNMANAGED_HEAD,
DependencyResolutionStrategy.COMMIT_STATUS_PREVIOUS_RELEASE_BRANCH,
],
)
assert d.package_dependency == PackageNamespaceVersionDependency(
namespace="foo", version="1.0"
)
assert d.ref == "aaaaaaaaaaaaaaaa"
@mock.patch("cumulusci.core.dependencies.resolvers.get_resolver")
def test_dynamic_dependency__twice(self, get_resolver):
d = ConcreteDynamicDependency()
resolvers = [
mock.Mock(
wraps=MockResolver(
"aaaaaaaaaaaaaaaa",
PackageNamespaceVersionDependency(namespace="foo", version="1.0"),
)
),
]
get_resolver.side_effect = resolvers
d.resolve(
mock.Mock(),
[
DependencyResolutionStrategy.UNMANAGED_HEAD,
DependencyResolutionStrategy.COMMIT_STATUS_PREVIOUS_RELEASE_BRANCH,
],
)
assert d.is_resolved
resolvers[0].resolve.assert_called_once()
d.resolve(
mock.Mock(),
[
DependencyResolutionStrategy.UNMANAGED_HEAD,
DependencyResolutionStrategy.COMMIT_STATUS_PREVIOUS_RELEASE_BRANCH,
],
)
assert d.is_resolved
resolvers[0].resolve.assert_called_once()
@mock.patch("cumulusci.core.dependencies.resolvers.get_resolver")
def test_dynamic_dependency_resolution_fails(self, get_resolver):
d = ConcreteDynamicDependency()
resolvers = [MockBadResolver()]
get_resolver.side_effect = resolvers
with pytest.raises(DependencyResolutionError):
d.resolve(mock.Mock(), [DependencyResolutionStrategy.UNMANAGED_HEAD])
@mock.patch("cumulusci.core.dependencies.resolvers.get_resolver")
def test_dynamic_dependency_resolution_no_results(self, get_resolver):
d = ConcreteDynamicDependency()
resolvers = [MockResolver("", None)]
get_resolver.side_effect = resolvers
with pytest.raises(DependencyResolutionError):
d.resolve(mock.Mock(), [DependencyResolutionStrategy.UNMANAGED_HEAD])
class TestGitHubDynamicSubfolderDependency:
def test_flatten(self):
gh = GitHubDynamicSubfolderDependency(
github="https://github.com/Test/TestRepo", subfolder="foo"
)
gh.ref = "aaaa"
assert gh.is_unmanaged
assert gh.flatten(mock.Mock()) == [
UnmanagedGitHubRefDependency(
github="https://github.com/Test/TestRepo",
subfolder="foo",
ref="aaaa",
namespace_inject=None,
namespace_strip=None,
)
]
def test_flatten__unresolved(self):
context = mock.Mock()
gh = GitHubDynamicSubfolderDependency(
repo_owner="Test", repo_name="TestRepo", subfolder="foo"
)
with pytest.raises(DependencyResolutionError) as e:
gh.flatten(context)
assert "is not resolved" in str(e)
def test_name(self):
gh = GitHubDynamicSubfolderDependency(
repo_owner="Test", repo_name="TestRepo", subfolder="foo"
)
assert gh.github in gh.name and gh.subfolder in gh.name
def test_description(self):
gh = GitHubDynamicSubfolderDependency(
repo_owner="Test", repo_name="TestRepo", subfolder="foo"
)
gh.ref = "aaaa"
assert (
gh.github in gh.description
and gh.subfolder in gh.description
and gh.ref in gh.description
)
class TestGitHubDynamicDependency:
def test_create_repo_name(self):
gh = GitHubDynamicDependency(repo_owner="Test", repo_name="TestRepo")
assert gh.github == "https://github.com/Test/TestRepo"
def test_create_failure(self):
with pytest.raises(ValidationError):
GitHubDynamicDependency(repo_owner="Test")
with pytest.raises(ValidationError):
GitHubDynamicDependency(
github="http://github.com/Test/TestRepo", tag="tag/1.0", ref="aaaaa"
)
with pytest.raises(ValidationError):
GitHubDynamicDependency(
github="http://github.com/Test/TestRepo", namespace_inject="foo"
)
def test_flatten(self, project_config):
gh = GitHubDynamicDependency(github="https://github.com/SFDO-Tooling/RootRepo")
gh.ref = "aaaaa"
gh.package_dependency = PackageNamespaceVersionDependency(
namespace="bar", version="2.0"
)
assert gh.flatten(project_config) == [
GitHubDynamicDependency(
github="https://github.com/SFDO-Tooling/DependencyRepo",
password_env_name="DEP_PW",
),
UnmanagedGitHubRefDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
subfolder="unpackaged/pre/first",
unmanaged=True,
ref="aaaaa",
),
UnmanagedGitHubRefDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
subfolder="unpackaged/pre/second",
unmanaged=True,
ref="aaaaa",
),
PackageNamespaceVersionDependency(namespace="bar", version="2.0"),
UnmanagedGitHubRefDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
subfolder="unpackaged/post/first",
unmanaged=False,
ref="aaaaa",
namespace_inject="bar",
),
]
def test_flatten__skip(self, project_config):
gh = GitHubDynamicDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
skip="unpackaged/pre/first",
)
gh.ref = "aaaaa"
gh.package_dependency = PackageNamespaceVersionDependency(
namespace="bar", version="2.0"
)
assert gh.flatten(project_config) == [
GitHubDynamicDependency(
github="https://github.com/SFDO-Tooling/DependencyRepo",
password_env_name="DEP_PW",
),
UnmanagedGitHubRefDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
subfolder="unpackaged/pre/second",
unmanaged=True,
ref="aaaaa",
),
PackageNamespaceVersionDependency(namespace="bar", version="2.0"),
UnmanagedGitHubRefDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
subfolder="unpackaged/post/first",
unmanaged=False,
ref="aaaaa",
namespace_inject="bar",
),
]
def test_flatten__not_found(self, project_config):
gh = GitHubDynamicDependency(
github="https://github.com/SFDO-Tooling/NoUnmanagedPreRepo",
)
gh.ref = "aaaaa"
gh.package_dependency = PackageNamespaceVersionDependency(
namespace="foo", version="2.0"
)
assert gh.flatten(project_config) == [
PackageNamespaceVersionDependency(namespace="foo", version="2.0"),
]
def test_flatten__unresolved(self):
context = mock.Mock()
gh = GitHubDynamicDependency(repo_owner="Test", repo_name="TestRepo")
with pytest.raises(DependencyResolutionError) as e:
gh.flatten(context)
assert "is not resolved" in str(e)
def test_flatten__bad_transitive_dep(self, project_config):
gh = GitHubDynamicDependency(repo_owner="Test", repo_name="RootRepoBadDep")
gh.ref = "aaaa"
with pytest.raises(DependencyResolutionError) as e:
gh.flatten(project_config)
assert "transitive dependency could not be parsed" in str(e)
def test_flatten__unmanaged_src(self, project_config):
gh = GitHubDynamicDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
unmanaged=True,
)
gh.ref = "aaaaa"
assert gh.flatten(project_config) == [
GitHubDynamicDependency(
github="https://github.com/SFDO-Tooling/DependencyRepo",
password_env_name="DEP_PW",
),
UnmanagedGitHubRefDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
subfolder="unpackaged/pre/first",
unmanaged=True,
ref="aaaaa",
),
UnmanagedGitHubRefDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
subfolder="unpackaged/pre/second",
unmanaged=True,
ref="aaaaa",
),
UnmanagedGitHubRefDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
unmanaged=True,
ref="aaaaa",
),
UnmanagedGitHubRefDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
subfolder="unpackaged/post/first",
unmanaged=True,
ref="aaaaa",
namespace_strip="bar",
),
]
def test_flatten__no_release(self, project_config):
gh = GitHubDynamicDependency(
github="https://github.com/SFDO-Tooling/RootRepo",
unmanaged=False,
)
gh.ref = "aaaaa"
with pytest.raises(DependencyResolutionError) as e:
gh.flatten(project_config)
assert "Could not find latest release" in str(e)
def test_name(self):
gh = GitHubDynamicDependency(github="https://github.com/SFDO-Tooling/RootRepo")
assert gh.github in gh.name
class TestPackageNamespaceVersionDependency:
@mock.patch(
"cumulusci.core.dependencies.dependencies.install_package_by_namespace_version"
)
def test_install(self, install_package_by_namespace_version):
m = PackageNamespaceVersionDependency(namespace="test", version="1.0")
context = mock.Mock()
org = OrgConfig({}, "dev")
org._installed_packages = {}
m.install(context, org)
install_package_by_namespace_version.assert_called_once_with(
context,
org,
m.namespace,
m.version,
PackageInstallOptions(),
retry_options=DEFAULT_PACKAGE_RETRY_OPTIONS,
)
@mock.patch(
"cumulusci.core.dependencies.dependencies.install_package_by_namespace_version"
)
def test_install__already_installed(self, install_package_by_namespace_version):
m = PackageNamespaceVersionDependency(namespace="test", version="1.0")
context = mock.Mock()
org = OrgConfig({}, "dev")
org._installed_packages = {
"test": [VersionInfo(id="04t000000000000", number=StrictVersion("1.0"))]
}
m.install(context, org)
install_package_by_namespace_version.assert_not_called()
@mock.patch(
"cumulusci.core.dependencies.dependencies.install_package_by_namespace_version"
)
def test_install__newer_beta(self, install_package_by_namespace_version):
m = PackageNamespaceVersionDependency(namespace="test", version="1.1 (Beta 4)")
context = mock.Mock()
org = OrgConfig({}, "dev")
org._installed_packages = {
"test": [VersionInfo(id="04t000000000000", number=StrictVersion("1.0"))]
}
m.install(context, org)
install_package_by_namespace_version.assert_called_once_with(
context,
org,
m.namespace,
m.version,
PackageInstallOptions(),
retry_options=DEFAULT_PACKAGE_RETRY_OPTIONS,
)
@mock.patch(
"cumulusci.core.dependencies.dependencies.install_package_by_namespace_version"
)
def test_install__custom_options(self, install_package_by_namespace_version):
m = PackageNamespaceVersionDependency(namespace="foo", version="1.0")
context = mock.Mock()
org = OrgConfig({}, "dev")
org._installed_packages = {}
opts = PackageInstallOptions(password="test")
m.install(context, org, options=opts)
install_package_by_namespace_version.assert_called_once_with(
context,
org,
m.namespace,
m.version,
opts,
retry_options=DEFAULT_PACKAGE_RETRY_OPTIONS,
)
@mock.patch(
"cumulusci.core.dependencies.dependencies.install_package_by_namespace_version"
)
def test_install__key_from_env(self, install_package_by_namespace_version):
m = PackageNamespaceVersionDependency(
namespace="foo", version="1.0", password_env_name="PW"
)
context = mock.Mock()
org = OrgConfig({}, "dev")
org._installed_packages = {}
with mock.patch.dict(os.environ, PW="testpw"):
m.install(context, org)
opts = install_package_by_namespace_version.call_args[0][4]
assert opts.password == "testpw"
def test_name(self):
assert (
PackageNamespaceVersionDependency(namespace="foo", version="1.0").name
== "Install foo 1.0"
)
def test_package_name(self):
assert (
PackageNamespaceVersionDependency(
namespace="foo", version="1.0", package_name="Foo"
).package
== "Foo"
)
assert (
PackageNamespaceVersionDependency(namespace="foo", version="1.0").package
== "foo"
)
class TestPackageVersionIdDependency:
@mock.patch(
"cumulusci.core.dependencies.dependencies.install_package_by_version_id"
)
def test_install(self, install_package_by_version_id):
m = PackageVersionIdDependency(version_id="04t000000000000")
context = mock.Mock()
org = OrgConfig({}, "dev")
org._installed_packages = {}
m.install(context, org)
install_package_by_version_id.assert_called_once_with(
context,
org,
m.version_id,
PackageInstallOptions(),
retry_options=DEFAULT_PACKAGE_RETRY_OPTIONS,
)
@mock.patch(
"cumulusci.core.dependencies.dependencies.install_package_by_version_id"
)
def test_install__already_installed(self, install_package_by_version_id):
m = PackageVersionIdDependency(version_id="04t000000000000")
context = mock.Mock()
org = OrgConfig({}, "dev")
org._installed_packages = {
"04t000000000000": [VersionInfo(number="1.0", id="04t000000000000")]
}
m.install(context, org)
install_package_by_version_id.assert_not_called()
@mock.patch(
"cumulusci.core.dependencies.dependencies.install_package_by_version_id"
)
def test_install__custom_options(self, install_package_by_version_id):
m = PackageVersionIdDependency(version_id="04t000000000000")
context = mock.Mock()
org = OrgConfig({}, "dev")
org._installed_packages = {}
opts = PackageInstallOptions(password="test")
m.install(context, org, options=opts)
install_package_by_version_id.assert_called_once_with(
context,
org,
m.version_id,
opts,
retry_options=DEFAULT_PACKAGE_RETRY_OPTIONS,
)
@mock.patch(
"cumulusci.core.dependencies.dependencies.install_package_by_version_id"
)
def test_install__key_from_env(self, install_package_by_version_id):
m = PackageVersionIdDependency(
version_id="04t000000000000", password_env_name="PW"
)
context = mock.Mock()
org = OrgConfig({}, "dev")
org._installed_packages = {}
with mock.patch.dict(os.environ, PW="testpw"):
m.install(context, org)
opts = install_package_by_version_id.call_args[0][3]
assert opts.password == "testpw"
def test_name(self):
assert (
PackageVersionIdDependency(
package_name="foo", version_id="04t000000000000"
).name
== "Install foo 04t000000000000"
)
def test_package_name(self):
assert (
PackageVersionIdDependency(version_id="04t000000000000").package
== "Unknown Package"
)
class TestUnmanagedGitHubRefDependency:
def test_validation(self):
with pytest.raises(ValidationError):
UnmanagedGitHubRefDependency(github="http://github.com")
u = UnmanagedGitHubRefDependency(
github="https://github.com/Test/TestRepo",
ref="aaaaaaaa",
namespace_token="obsolete but accepted",
filename_token="obsolete but accepted",
)
u = UnmanagedGitHubRefDependency(
repo_owner="Test", repo_name="TestRepo", ref="aaaaaaaa"
)
assert u.github == "https://github.com/Test/TestRepo"
@mock.patch(
"cumulusci.core.dependencies.dependencies.download_extract_github_from_repo"
)
@mock.patch("cumulusci.core.dependencies.dependencies.MetadataPackageZipBuilder")
@mock.patch("cumulusci.core.dependencies.dependencies.ApiDeploy")
def test_install(self, api_deploy_mock, zip_builder_mock, download_mock):
d = UnmanagedGitHubRefDependency(
github="http://github.com/Test/TestRepo", ref="aaaaaaaa"
)
zf = ZipFile(io.BytesIO(), "w")
zf.writestr("package.xml", "test")
download_mock.return_value = zf
context = mock.Mock()
org = mock.Mock()
d.install(context, org)
download_mock.assert_called_once_with(
context.get_repo_from_url.return_value, ref=d.ref
)
zip_builder_mock.from_zipfile.assert_called_once_with(
download_mock.return_value,
path=None,
options={
"unmanaged": True,
"namespace_inject": None,
"namespace_strip": None,
},
logger=mock.ANY, # the logger
)
api_deploy_mock.assert_called_once_with(
mock.ANY, # The context object is checked below
zip_builder_mock.from_zipfile.return_value.as_base64.return_value,
)
mock_task = api_deploy_mock.call_args_list[0][0][0]
assert mock_task.org_config == org
assert mock_task.project_config == context
api_deploy_mock.return_value.assert_called_once()
def test_get_unmanaged(self):
org = mock.Mock()
org.installed_packages = {"foo": "1.0"}
assert (
UnmanagedGitHubRefDependency(
github="http://github.com/Test/TestRepo", ref="aaaa", unmanaged=True
)._get_unmanaged(org)
is True
)
assert (
UnmanagedGitHubRefDependency(
github="http://github.com/Test/TestRepo", ref="aaaa"
)._get_unmanaged(org)
is True
)
assert (
UnmanagedGitHubRefDependency(
github="http://github.com/Test/TestRepo",
ref="aaaa",
namespace_inject="foo",
)._get_unmanaged(org)
is False
)
assert (
UnmanagedGitHubRefDependency(
github="http://github.com/Test/TestRepo",
ref="aaaa",
namespace_inject="bar",
)._get_unmanaged(org)
is True
)
def test_name(self):
assert (
UnmanagedGitHubRefDependency(
github="http://github.com/Test/TestRepo",
subfolder="unpackaged/pre/first",
ref="aaaa",
).name
== "Deploy http://github.com/Test/TestRepo/unpackaged/pre/first"
)
assert (
UnmanagedGitHubRefDependency(
github="http://github.com/Test/TestRepo",
ref="aaaa",
).name
== "Deploy http://github.com/Test/TestRepo"
)
class TestUnmanagedZipURLDependency:
@mock.patch("cumulusci.core.dependencies.dependencies.download_extract_zip")
@mock.patch("cumulusci.core.dependencies.dependencies.MetadataPackageZipBuilder")
@mock.patch("cumulusci.core.dependencies.dependencies.ApiDeploy")
def test_install(self, api_deploy_mock, zip_builder_mock, download_mock):
d = UnmanagedZipURLDependency(zip_url="http://foo.com")
zf = ZipFile(io.BytesIO(), "w")
zf.writestr("src/package.xml", "test")
download_mock.return_value = zf
context = mock.Mock()
org = mock.Mock()
d.install(context, org)
download_mock.assert_called_once_with(d.zip_url)
zip_builder_mock.from_zipfile.assert_called_once_with(
mock.ANY,
options={
"unmanaged": True,
"namespace_inject": None,
"namespace_strip": None,
},
path=None,
logger=mock.ANY, # the logger
)
api_deploy_mock.assert_called_once_with(
mock.ANY, # The context object is checked below
zip_builder_mock.from_zipfile.return_value.as_base64.return_value,
)
mock_task = api_deploy_mock.call_args_list[0][0][0]
assert mock_task.org_config == org
assert mock_task.project_config == context
api_deploy_mock.return_value.assert_called_once()
def test_get_unmanaged(self):
org = mock.Mock()
org.installed_packages = {"foo": "1.0"}
assert (
UnmanagedZipURLDependency(
zip_url="http://foo.com", unmanaged=True
)._get_unmanaged(org)
is True
)
assert (
UnmanagedZipURLDependency(
zip_url="http://foo.com", namespace_inject="foo"
)._get_unmanaged(org)
is False
)
assert (
UnmanagedZipURLDependency(
zip_url="http://foo.com", namespace_inject="bar"
)._get_unmanaged(org)
is True
)
def test_name(self):
assert (
UnmanagedZipURLDependency(zip_url="http://foo.com", subfolder="bar").name
== "Deploy http://foo.com /bar"
)
@mock.patch("cumulusci.core.dependencies.dependencies.MetadataPackageZipBuilder")
@mock.patch("cumulusci.core.dependencies.dependencies.download_extract_zip")
@mock.patch("cumulusci.core.dependencies.dependencies.zip_subfolder")
def test_get_metadata_package_zip_builder__mdapi_root(
self, subfolder_mock, download_zip_mock, zipbuilder_mock
):
zf = ZipFile(io.BytesIO(), "w")
zf.writestr("src/package.xml", "test")
dep = UnmanagedZipURLDependency(zip_url="http://foo.com")
download_zip_mock.return_value = zf
subfolder_mock.return_value = zip_subfolder(zf, "src")
context = mock.Mock()
org = mock.Mock()
assert (
dep.get_metadata_package_zip_builder(context, org)
== zipbuilder_mock.from_zipfile.return_value
)
subfolder_mock.assert_called_once_with(zf, "src")
zipbuilder_mock.from_zipfile.assert_called_once_with(
subfolder_mock.return_value,
path=None,
options={
"unmanaged": True,
"namespace_inject": None,
"namespace_strip": None,
},
logger=context.logger,
)
@mock.patch("cumulusci.core.dependencies.dependencies.MetadataPackageZipBuilder")
@mock.patch("cumulusci.core.dependencies.dependencies.download_extract_zip")
@mock.patch("cumulusci.core.dependencies.dependencies.zip_subfolder")
def test_get_metadata_package_zip_builder__mdapi_subfolder(
self, subfolder_mock, download_zip_mock, zipbuilder_mock
):
zf = ZipFile(io.BytesIO(), "w")
zf.writestr("unpackaged/pre/first/package.xml", "test")
dep = UnmanagedZipURLDependency(
zip_url="http://foo.com", subfolder="unpackaged/pre/first"
)
download_zip_mock.return_value = zf
subfolder_mock.return_value = zip_subfolder(zf, "unpackaged/pre/first")
context = mock.Mock()
org = mock.Mock()
assert (
dep.get_metadata_package_zip_builder(context, org)
== zipbuilder_mock.from_zipfile.return_value
)
subfolder_mock.assert_called_once_with(zf, "unpackaged/pre/first")
zipbuilder_mock.from_zipfile.assert_called_once_with(
subfolder_mock.return_value,
path=None,
options={
"unmanaged": True,
"namespace_inject": None,
"namespace_strip": None,
},
logger=context.logger,
)
@mock.patch("cumulusci.core.dependencies.dependencies.MetadataPackageZipBuilder")
@mock.patch("cumulusci.core.dependencies.dependencies.download_extract_zip")
@mock.patch("cumulusci.core.dependencies.dependencies.zip_subfolder")
@mock.patch("cumulusci.core.sfdx.sfdx")
def test_get_metadata_package_zip_builder__sfdx(
self, sfdx_mock, subfolder_mock, download_zip_mock, zipbuilder_mock
):
zf = ZipFile(io.BytesIO(), "w")
zf.writestr("force-app/main/default/classes/", "test")
dep = UnmanagedZipURLDependency(zip_url="http://foo.com", subfolder="force-app")
download_zip_mock.return_value = zf
context = mock.Mock()
org = mock.Mock()
assert (
dep.get_metadata_package_zip_builder(context, org)
== zipbuilder_mock.from_zipfile.return_value
)
subfolder_mock.assert_not_called()
zipbuilder_mock.from_zipfile.assert_called_once_with(
None,
path=mock.ANY,
options={
"unmanaged": True,
"namespace_inject": None,
"namespace_strip": None,
},
logger=context.logger,
)
sfdx_mock.assert_called_once_with(
"force:source:convert",
args=["-d", mock.ANY, "-r", "force-app"],
capture_output=True,
check_return=True,
)
class TestParseDependency:
def test_parse_managed_package_dep(self):
m = parse_dependency({"version": "1.0", "namespace": "foo"})
assert isinstance(m, PackageNamespaceVersionDependency)
m = parse_dependency({"version_id": "04t000000000000"})
assert isinstance(m, PackageVersionIdDependency)
def test_parse_github_dependency(self):
g = parse_dependency({"github": "https://github.com/Test/TestRepo"})
assert isinstance(g, GitHubDynamicDependency)
g = parse_dependency({"repo_owner": "Test", "repo_name": "TestRepo"})
assert isinstance(g, GitHubDynamicDependency)
def test_parse_unmanaged_dependency(self):
u = parse_dependency(
{"repo_owner": "Test", "repo_name": "TestRepo", "ref": "aaaaaaaa"}
)
assert isinstance(u, UnmanagedGitHubRefDependency)
u = parse_dependency(
{"github": "https://github.com/Test/TestRepo", "ref": "aaaaaaaa"}
)
assert isinstance(u, UnmanagedGitHubRefDependency)
u = parse_dependency(
{
"zip_url": "https://github.com/Test/TestRepo",
"subfolder": "unpackaged/pre",
}
)
assert isinstance(u, UnmanagedZipURLDependency)
| 33.994463
| 88
| 0.616998
| 2,895
| 30,697
| 6.313644
| 0.081174
| 0.016851
| 0.041033
| 0.034905
| 0.821753
| 0.801893
| 0.772787
| 0.74773
| 0.699475
| 0.651056
| 0
| 0.010662
| 0.281982
| 30,697
| 902
| 89
| 34.032151
| 0.818611
| 0.00303
| 0
| 0.616095
| 0
| 0
| 0.16592
| 0.064969
| 0
| 0
| 0
| 0
| 0.097625
| 1
| 0.068602
| false
| 0.011873
| 0.019789
| 0.006596
| 0.112137
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
22f42c85a24917cc703e5a57b5a364156978a407
| 119
|
py
|
Python
|
tools/bin/pythonSrc/pychecker-0.8.18/test_input/import44.py
|
YangHao666666/hawq
|
10cff8350f1ba806c6fec64eb67e0e6f6f24786c
|
[
"Artistic-1.0-Perl",
"ISC",
"bzip2-1.0.5",
"TCL",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"PostgreSQL",
"BSD-3-Clause"
] | 450
|
2015-09-05T09:12:51.000Z
|
2018-08-30T01:45:36.000Z
|
tools/bin/pythonSrc/pychecker-0.8.18/test_input/import44.py
|
YangHao666666/hawq
|
10cff8350f1ba806c6fec64eb67e0e6f6f24786c
|
[
"Artistic-1.0-Perl",
"ISC",
"bzip2-1.0.5",
"TCL",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"PostgreSQL",
"BSD-3-Clause"
] | 1,274
|
2015-09-22T20:06:16.000Z
|
2018-08-31T22:14:00.000Z
|
tools/bin/pythonSrc/pychecker-0.8.18/test_input/import44.py
|
YangHao666666/hawq
|
10cff8350f1ba806c6fec64eb67e0e6f6f24786c
|
[
"Artistic-1.0-Perl",
"ISC",
"bzip2-1.0.5",
"TCL",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"PostgreSQL",
"BSD-3-Clause"
] | 278
|
2015-09-21T19:15:06.000Z
|
2018-08-31T00:36:51.000Z
|
'd'
class Ccc:
'd'
def __init__(self, c):
pass
class Ddd:
'd'
def __init__(self):
self.ccc = Ccc(1)
| 9.153846
| 24
| 0.546218
| 19
| 119
| 3
| 0.526316
| 0.140351
| 0.280702
| 0.421053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011905
| 0.294118
| 119
| 12
| 25
| 9.916667
| 0.666667
| 0.042017
| 0
| 0.333333
| 0
| 0
| 0.025424
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.111111
| 0
| 0
| 0.444444
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
22f53ed7d646ca7a8e80685173ae626910c48100
| 416
|
py
|
Python
|
src/Core/BetaFunctions/__init__.py
|
NuxDD/pyrate
|
01e61027813d4eaf674dd5ce5db6898ea5ca22aa
|
[
"Apache-2.0"
] | 7
|
2020-07-28T20:01:11.000Z
|
2021-12-21T00:52:47.000Z
|
src/Core/BetaFunctions/__init__.py
|
NuxDD/pyrate
|
01e61027813d4eaf674dd5ce5db6898ea5ca22aa
|
[
"Apache-2.0"
] | 6
|
2020-11-20T10:54:38.000Z
|
2021-09-28T08:07:32.000Z
|
src/Core/BetaFunctions/__init__.py
|
NuxDD/pyrate
|
01e61027813d4eaf674dd5ce5db6898ea5ca22aa
|
[
"Apache-2.0"
] | 6
|
2020-10-06T17:58:16.000Z
|
2021-12-10T06:11:41.000Z
|
# -*- coding: utf-8 -*-
from .GaugeCouplings import GaugeBetaFunction
from .QuarticCouplings import QuarticBetaFunction
from .YukawaCouplings import YukawaBetaFunction
from .FermionMassCouplings import FermionMassBetaFunction
from .TrilinearCouplings import TrilinearBetaFunction
from .ScalarMassCouplings import ScalarMassBetaFunction
from .Anomalous_Vevs import FermionAnomalous, ScalarAnomalous, VevBetaFunction
| 41.6
| 78
| 0.870192
| 34
| 416
| 10.617647
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002632
| 0.086538
| 416
| 9
| 79
| 46.222222
| 0.947368
| 0.050481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a3ba32e9e90a8ae68516d31e8c81043b11737e6b
| 261
|
py
|
Python
|
nipy/modalities/fmri/spm/__init__.py
|
yarikoptic/NiPy-OLD
|
8759b598ac72d3b9df7414642c7a662ad9c55ece
|
[
"BSD-3-Clause"
] | 1
|
2015-08-22T16:14:45.000Z
|
2015-08-22T16:14:45.000Z
|
nipy/modalities/fmri/spm/__init__.py
|
yarikoptic/NiPy-OLD
|
8759b598ac72d3b9df7414642c7a662ad9c55ece
|
[
"BSD-3-Clause"
] | null | null | null |
nipy/modalities/fmri/spm/__init__.py
|
yarikoptic/NiPy-OLD
|
8759b598ac72d3b9df7414642c7a662ad9c55ece
|
[
"BSD-3-Clause"
] | null | null | null |
"""
An (approximate) version of SPM's run-level model for fMRI data
Consists of an OLS pass through the data, followed by a pooled estimate
of a covariance matrix constructed from a series expansion of an
AR1 model, expanded in terms of rho.
"""
import model
| 26.1
| 71
| 0.770115
| 45
| 261
| 4.466667
| 0.755556
| 0.039801
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004673
| 0.180077
| 261
| 9
| 72
| 29
| 0.934579
| 0.911877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a3c0763f59942808a197df537ebd7cd7aaac1913
| 467
|
py
|
Python
|
bitmovin_api_sdk/encoding/manifests/dash/periods/adaptationsets/representations/fmp4/drm/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 11
|
2019-07-03T10:41:16.000Z
|
2022-02-25T21:48:06.000Z
|
bitmovin_api_sdk/encoding/manifests/dash/periods/adaptationsets/representations/fmp4/drm/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 8
|
2019-11-23T00:01:25.000Z
|
2021-04-29T12:30:31.000Z
|
bitmovin_api_sdk/encoding/manifests/dash/periods/adaptationsets/representations/fmp4/drm/__init__.py
|
jaythecaesarean/bitmovin-api-sdk-python
|
48166511fcb9082041c552ace55a9b66cc59b794
|
[
"MIT"
] | 13
|
2020-01-02T14:58:18.000Z
|
2022-03-26T12:10:30.000Z
|
from bitmovin_api_sdk.encoding.manifests.dash.periods.adaptationsets.representations.fmp4.drm.drm_api import DrmApi
from bitmovin_api_sdk.encoding.manifests.dash.periods.adaptationsets.representations.fmp4.drm.contentprotection.contentprotection_api import ContentprotectionApi
from bitmovin_api_sdk.encoding.manifests.dash.periods.adaptationsets.representations.fmp4.drm.dash_fmp4_drm_representation_list_query_params import DashFmp4DrmRepresentationListQueryParams
| 116.75
| 188
| 0.914347
| 54
| 467
| 7.648148
| 0.388889
| 0.067797
| 0.108959
| 0.130751
| 0.595642
| 0.595642
| 0.595642
| 0.595642
| 0.595642
| 0.595642
| 0
| 0.010989
| 0.025696
| 467
| 3
| 189
| 155.666667
| 0.896703
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a3d225a7b3bf8f5a1fead9c81606880f2d0aec4e
| 19
|
py
|
Python
|
cve_2021_3572.py
|
skazi0/CVE-2021-3572
|
bcf31d285c0c314d9e3d5444ecb6ff0d3380661b
|
[
"MIT"
] | 1
|
2021-06-07T09:47:13.000Z
|
2021-06-07T09:47:13.000Z
|
cve_2021_3572.py
|
skazi0/CVE-2021-3572
|
bcf31d285c0c314d9e3d5444ecb6ff0d3380661b
|
[
"MIT"
] | null | null | null |
cve_2021_3572.py
|
skazi0/CVE-2021-3572
|
bcf31d285c0c314d9e3d5444ecb6ff0d3380661b
|
[
"MIT"
] | 2
|
2021-06-08T21:37:53.000Z
|
2021-07-14T20:28:50.000Z
|
version = "9999.0"
| 9.5
| 18
| 0.631579
| 3
| 19
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.3125
| 0.157895
| 19
| 1
| 19
| 19
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a3e3c7541d398152d5cdbc38df73f8667f2935c2
| 38
|
py
|
Python
|
ndscheduler/corescheduler/datastore/providers/__init__.py
|
JonathanCalderon/ndscheduler
|
1a410fd28bc248506dfefd18348e181e1169412c
|
[
"BSD-2-Clause"
] | 1,038
|
2015-12-22T20:20:48.000Z
|
2022-03-24T09:39:16.000Z
|
ndscheduler/corescheduler/datastore/providers/__init__.py
|
JonathanCalderon/ndscheduler
|
1a410fd28bc248506dfefd18348e181e1169412c
|
[
"BSD-2-Clause"
] | 76
|
2015-12-23T18:19:44.000Z
|
2021-12-27T10:59:41.000Z
|
ndscheduler/corescheduler/datastore/providers/__init__.py
|
JonathanCalderon/ndscheduler
|
1a410fd28bc248506dfefd18348e181e1169412c
|
[
"BSD-2-Clause"
] | 215
|
2015-12-24T06:16:54.000Z
|
2022-03-30T02:35:30.000Z
|
"""Init file for providers library"""
| 19
| 37
| 0.710526
| 5
| 38
| 5.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131579
| 38
| 1
| 38
| 38
| 0.818182
| 0.815789
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
4a87dbef63bbf02dbb94b3400c78c8a0210c7bbb
| 259
|
py
|
Python
|
rpaas/__init__.py
|
pedrokiefer/rpaas
|
7e2285ef4fbd3959109eead15c82d8ff3accdc94
|
[
"BSD-3-Clause"
] | null | null | null |
rpaas/__init__.py
|
pedrokiefer/rpaas
|
7e2285ef4fbd3959109eead15c82d8ff3accdc94
|
[
"BSD-3-Clause"
] | null | null | null |
rpaas/__init__.py
|
pedrokiefer/rpaas
|
7e2285ef4fbd3959109eead15c82d8ff3accdc94
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2016 rpaas authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import os
from rpaas import manager
def get_manager():
return manager.Manager(dict(os.environ))
| 21.583333
| 52
| 0.756757
| 42
| 259
| 4.642857
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 0.181467
| 259
| 11
| 53
| 23.545455
| 0.900943
| 0.571429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 5
|
4aa30a9ccc1e5ec1c94518e55afffde34854a788
| 94
|
py
|
Python
|
courses/renderer/sections/__init__.py
|
office-for-students/wagtail-CMS
|
98789c279edf48f2bbedb5415437da3317f0e12b
|
[
"MIT"
] | 4
|
2019-06-04T07:18:44.000Z
|
2020-06-15T22:27:36.000Z
|
courses/renderer/sections/__init__.py
|
office-for-students/wagtail-CMS
|
98789c279edf48f2bbedb5415437da3317f0e12b
|
[
"MIT"
] | 38
|
2019-05-09T13:14:56.000Z
|
2022-03-12T00:54:57.000Z
|
courses/renderer/sections/__init__.py
|
office-for-students/wagtail-CMS
|
98789c279edf48f2bbedb5415437da3317f0e12b
|
[
"MIT"
] | 3
|
2019-09-26T14:32:36.000Z
|
2021-05-06T15:48:01.000Z
|
from .satisfaction import SatisfactionSection
from .course_details import CourseDetailSection
| 31.333333
| 47
| 0.893617
| 9
| 94
| 9.222222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 94
| 2
| 48
| 47
| 0.965116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4ac90581b39714fd39724be95be388c35e9d44f1
| 313
|
py
|
Python
|
notes/design/low-level/case-studies/auction-system/auction/commands/CommandAddSeller.py
|
Anmol-Singh-Jaggi/interview-notes
|
65af75e2b5725894fa5e13bb5cd9ecf152a0d652
|
[
"MIT"
] | 6
|
2020-07-05T05:15:19.000Z
|
2021-01-24T20:17:14.000Z
|
notes/design/low-level/case-studies/auction-system/auction/commands/CommandAddSeller.py
|
Anmol-Singh-Jaggi/interview-notes
|
65af75e2b5725894fa5e13bb5cd9ecf152a0d652
|
[
"MIT"
] | null | null | null |
notes/design/low-level/case-studies/auction-system/auction/commands/CommandAddSeller.py
|
Anmol-Singh-Jaggi/interview-notes
|
65af75e2b5725894fa5e13bb5cd9ecf152a0d652
|
[
"MIT"
] | 2
|
2020-09-14T06:46:37.000Z
|
2021-06-15T09:17:21.000Z
|
from commands.AbstractCommand import AbstractCommand
class CommandAddSeller(AbstractCommand):
def __init__(self, auction_system, seller_id):
self.auction_system = auction_system
self.seller_id = seller_id
def execute(self):
return self.auction_system.add_seller(self.seller_id)
| 28.454545
| 61
| 0.757188
| 37
| 313
| 6.054054
| 0.432432
| 0.232143
| 0.227679
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175719
| 313
| 10
| 62
| 31.3
| 0.868217
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.142857
| 0.142857
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
436c8a8ce18ed59398208c39b4f6d1058766577a
| 221
|
py
|
Python
|
lambdas/package/db_config.py
|
charvi-a/320-S20-Track1
|
ac97504fc1fdedb1c311773b015570eeea8a8663
|
[
"BSD-3-Clause"
] | 9
|
2019-12-30T16:32:22.000Z
|
2020-03-03T20:14:47.000Z
|
lambdas/package/db_config.py
|
charvi-a/320-S20-Track1
|
ac97504fc1fdedb1c311773b015570eeea8a8663
|
[
"BSD-3-Clause"
] | 283
|
2020-02-03T15:16:03.000Z
|
2020-05-05T03:18:59.000Z
|
lambdas/package/db_config.py
|
charvi-a/320-S20-Track1
|
ac97504fc1fdedb1c311773b015570eeea8a8663
|
[
"BSD-3-Clause"
] | 3
|
2020-04-16T15:23:29.000Z
|
2020-05-12T00:38:41.000Z
|
DB_NAME = "postgres"
ARN = "arn:aws:rds:us-east-2:500514381816:cluster:postgres"
SECRET_ARN = "arn:aws:secretsmanager:us-east-2:500514381816:secret:rds-db-credentials/cluster-33FXTTBJUA6VTIJBXQWHEGXQRE/postgres-3QyWu7"
| 36.833333
| 137
| 0.809955
| 30
| 221
| 5.9
| 0.533333
| 0.067797
| 0.101695
| 0.214689
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147619
| 0.049774
| 221
| 5
| 138
| 44.2
| 0.695238
| 0
| 0
| 0
| 0
| 0.333333
| 0.826484
| 0.789954
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
43b1e5ba3f28e256a57920b28b7ff13fa312e283
| 101
|
py
|
Python
|
python/ql/test/3/query-tests/Expressions/Formatting/test.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 4,036
|
2020-04-29T00:09:57.000Z
|
2022-03-31T14:16:38.000Z
|
python/ql/test/3/query-tests/Expressions/Formatting/test.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 2,970
|
2020-04-28T17:24:18.000Z
|
2022-03-31T22:40:46.000Z
|
python/ql/test/3/query-tests/Expressions/Formatting/test.py
|
ScriptBox99/github-codeql
|
2ecf0d3264db8fb4904b2056964da469372a235c
|
[
"MIT"
] | 794
|
2020-04-29T00:28:25.000Z
|
2022-03-30T08:21:46.000Z
|
def format_starred(t):
return "%x %x %x" % (0, *t)
def ascii(a, b):
return "%a %x" % (a, b)
| 16.833333
| 31
| 0.485149
| 19
| 101
| 2.526316
| 0.526316
| 0.083333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013514
| 0.267327
| 101
| 6
| 32
| 16.833333
| 0.635135
| 0
| 0
| 0
| 0
| 0
| 0.128713
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
43d6ca6b7d5ae9e3429bfe511bb71714c4e4a7e5
| 51
|
py
|
Python
|
projects/faces/arcface/utils/__init__.py
|
Bingwen-Hu/hackaway
|
69727d76fd652390d9660e9ea4354ba5cc76dd5c
|
[
"BSD-2-Clause"
] | null | null | null |
projects/faces/arcface/utils/__init__.py
|
Bingwen-Hu/hackaway
|
69727d76fd652390d9660e9ea4354ba5cc76dd5c
|
[
"BSD-2-Clause"
] | null | null | null |
projects/faces/arcface/utils/__init__.py
|
Bingwen-Hu/hackaway
|
69727d76fd652390d9660e9ea4354ba5cc76dd5c
|
[
"BSD-2-Clause"
] | null | null | null |
from .visualizer import *
from .view_model import *
| 25.5
| 25
| 0.784314
| 7
| 51
| 5.571429
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137255
| 51
| 2
| 26
| 25.5
| 0.886364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
43e34b6872c10bd556cde69d7c29dd75d8668172
| 133
|
py
|
Python
|
liboptpy/unconstr_solvers/so/__init__.py
|
amkatrutsa/liboptpy
|
8e89b3f5a16aaed759c3cd727639c927ed5741cf
|
[
"MIT"
] | 57
|
2018-08-17T12:58:07.000Z
|
2022-03-22T16:18:28.000Z
|
liboptpy/unconstr_solvers/so/__init__.py
|
amkatrutsa/liboptpy
|
8e89b3f5a16aaed759c3cd727639c927ed5741cf
|
[
"MIT"
] | 6
|
2018-05-13T10:00:15.000Z
|
2021-04-04T12:08:02.000Z
|
liboptpy/unconstr_solvers/so/__init__.py
|
amkatrutsa/liboptpy
|
8e89b3f5a16aaed759c3cd727639c927ed5741cf
|
[
"MIT"
] | 16
|
2019-01-12T07:15:29.000Z
|
2022-03-22T11:52:29.000Z
|
from ._newton import NewtonMethod
from ._inexact_newton import InexactNewtonMethod
__all__ = ["NewtonMethod", "InexactNewtonMethod"]
| 33.25
| 49
| 0.834586
| 12
| 133
| 8.666667
| 0.583333
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090226
| 133
| 4
| 49
| 33.25
| 0.859504
| 0
| 0
| 0
| 0
| 0
| 0.231343
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
78dccbf1ff1e194ce657657464f4ac48d5535e9c
| 99
|
py
|
Python
|
API_hardware/Adafruit_DHT_MOCK.py
|
andersenmp/Temperature_Controller
|
a1a1c498cf5dca4d4e28ba630a3f5939da0ab7ce
|
[
"Apache-2.0"
] | null | null | null |
API_hardware/Adafruit_DHT_MOCK.py
|
andersenmp/Temperature_Controller
|
a1a1c498cf5dca4d4e28ba630a3f5939da0ab7ce
|
[
"Apache-2.0"
] | null | null | null |
API_hardware/Adafruit_DHT_MOCK.py
|
andersenmp/Temperature_Controller
|
a1a1c498cf5dca4d4e28ba630a3f5939da0ab7ce
|
[
"Apache-2.0"
] | null | null | null |
#Mock class for Adafruit Python DHT mock
def read_retry(a,b):
print ("read_retry DHT",a,b)
| 12.375
| 40
| 0.686869
| 18
| 99
| 3.666667
| 0.666667
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.20202
| 99
| 7
| 41
| 14.142857
| 0.835443
| 0.393939
| 0
| 0
| 0
| 0
| 0.245614
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
601ad1ac15a29a330f03f33758d30c4c2007c163
| 162
|
py
|
Python
|
my_project/__init__.py
|
sotch-pr35mac/clinc-business-logic-server-template-python
|
6178edb7b3bb368376720fe2baf0babe65a37329
|
[
"BSD-3-Clause"
] | 1
|
2019-11-11T15:40:35.000Z
|
2019-11-11T15:40:35.000Z
|
my_project/__init__.py
|
sotch-pr35mac/clinc-business-logic-server-template-python
|
6178edb7b3bb368376720fe2baf0babe65a37329
|
[
"BSD-3-Clause"
] | 3
|
2018-08-06T20:43:38.000Z
|
2021-06-10T20:43:52.000Z
|
my_project/__init__.py
|
sotch-pr35mac/clinc-business-logic-server-template-python
|
6178edb7b3bb368376720fe2baf0babe65a37329
|
[
"BSD-3-Clause"
] | 2
|
2018-08-06T18:37:39.000Z
|
2019-05-23T13:27:56.000Z
|
"""
Module imports for templates.python.business_logic.my_project
This file is automatically generated by ./scripts/empty_pyinit.sh
DO NOT EDIT IT MANUALLY
"""
| 20.25
| 65
| 0.796296
| 24
| 162
| 5.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123457
| 162
| 7
| 66
| 23.142857
| 0.887324
| 0.944444
| 0
| null | 1
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
601e4eb867b89e6f2831588d206cf728c41cb9ff
| 112
|
py
|
Python
|
enthought/contexts/masks/index_context_mask.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/contexts/masks/index_context_mask.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/contexts/masks/index_context_mask.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from codetools.contexts.masks.index_context_mask import *
| 28
| 57
| 0.857143
| 15
| 112
| 5.933333
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098214
| 112
| 3
| 58
| 37.333333
| 0.881188
| 0.107143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
604dbb41f945c60118614a6051f87fbfd444da30
| 80,027
|
py
|
Python
|
google/cloud/domains_v1/services/domains/async_client.py
|
LaudateCorpus1/python-domains
|
2494a6f379c911ecdddf1298abfb5ad7863906f1
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/domains_v1/services/domains/async_client.py
|
LaudateCorpus1/python-domains
|
2494a6f379c911ecdddf1298abfb5ad7863906f1
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/domains_v1/services/domains/async_client.py
|
LaudateCorpus1/python-domains
|
2494a6f379c911ecdddf1298abfb5ad7863906f1
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.domains_v1.services.domains import pagers
from google.cloud.domains_v1.types import domains
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from google.type import money_pb2 # type: ignore
from .transports.base import DomainsTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import DomainsGrpcAsyncIOTransport
from .client import DomainsClient
class DomainsAsyncClient:
"""The Cloud Domains API enables management and configuration of
domain names.
"""
_client: DomainsClient
DEFAULT_ENDPOINT = DomainsClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = DomainsClient.DEFAULT_MTLS_ENDPOINT
registration_path = staticmethod(DomainsClient.registration_path)
parse_registration_path = staticmethod(DomainsClient.parse_registration_path)
common_billing_account_path = staticmethod(
DomainsClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
DomainsClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(DomainsClient.common_folder_path)
parse_common_folder_path = staticmethod(DomainsClient.parse_common_folder_path)
common_organization_path = staticmethod(DomainsClient.common_organization_path)
parse_common_organization_path = staticmethod(
DomainsClient.parse_common_organization_path
)
common_project_path = staticmethod(DomainsClient.common_project_path)
parse_common_project_path = staticmethod(DomainsClient.parse_common_project_path)
common_location_path = staticmethod(DomainsClient.common_location_path)
parse_common_location_path = staticmethod(DomainsClient.parse_common_location_path)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
DomainsAsyncClient: The constructed client.
"""
return DomainsClient.from_service_account_info.__func__(DomainsAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
DomainsAsyncClient: The constructed client.
"""
return DomainsClient.from_service_account_file.__func__(DomainsAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
(1) if `client_options.api_endpoint` if provided, use the provided one.
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
default mTLS endpoint; if the environment variabel is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return DomainsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> DomainsTransport:
"""Returns the transport used by the client instance.
Returns:
DomainsTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(DomainsClient).get_transport_class, type(DomainsClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, DomainsTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the domains client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.DomainsTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = DomainsClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def search_domains(
self,
request: Union[domains.SearchDomainsRequest, dict] = None,
*,
location: str = None,
query: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> domains.SearchDomainsResponse:
r"""Searches for available domain names similar to the provided
query.
Availability results from this method are approximate; call
``RetrieveRegisterParameters`` on a domain before registering to
confirm availability.
Args:
request (Union[google.cloud.domains_v1.types.SearchDomainsRequest, dict]):
The request object. Request for the `SearchDomains`
method.
location (:class:`str`):
Required. The location. Must be in the format
``projects/*/locations/*``.
This corresponds to the ``location`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
query (:class:`str`):
Required. String used to search for
available domain names.
This corresponds to the ``query`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.domains_v1.types.SearchDomainsResponse:
Response for the SearchDomains method.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([location, query])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.SearchDomainsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if location is not None:
request.location = location
if query is not None:
request.query = query
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.search_domains,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def retrieve_register_parameters(
self,
request: Union[domains.RetrieveRegisterParametersRequest, dict] = None,
*,
location: str = None,
domain_name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> domains.RetrieveRegisterParametersResponse:
r"""Gets parameters needed to register a new domain name, including
price and up-to-date availability. Use the returned values to
call ``RegisterDomain``.
Args:
request (Union[google.cloud.domains_v1.types.RetrieveRegisterParametersRequest, dict]):
The request object. Request for the
`RetrieveRegisterParameters` method.
location (:class:`str`):
Required. The location. Must be in the format
``projects/*/locations/*``.
This corresponds to the ``location`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
domain_name (:class:`str`):
Required. The domain name. Unicode
domain names must be expressed in
Punycode format.
This corresponds to the ``domain_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.domains_v1.types.RetrieveRegisterParametersResponse:
Response for the RetrieveRegisterParameters method.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([location, domain_name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.RetrieveRegisterParametersRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if location is not None:
request.location = location
if domain_name is not None:
request.domain_name = domain_name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.retrieve_register_parameters,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def register_domain(
self,
request: Union[domains.RegisterDomainRequest, dict] = None,
*,
parent: str = None,
registration: domains.Registration = None,
yearly_price: money_pb2.Money = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Registers a new domain name and creates a corresponding
``Registration`` resource.
Call ``RetrieveRegisterParameters`` first to check availability
of the domain name and determine parameters like price that are
needed to build a call to this method.
A successful call creates a ``Registration`` resource in state
``REGISTRATION_PENDING``, which resolves to ``ACTIVE`` within
1-2 minutes, indicating that the domain was successfully
registered. If the resource ends up in state
``REGISTRATION_FAILED``, it indicates that the domain was not
registered successfully, and you can safely delete the resource
and retry registration.
Args:
request (Union[google.cloud.domains_v1.types.RegisterDomainRequest, dict]):
The request object. Request for the `RegisterDomain`
method.
parent (:class:`str`):
Required. The parent resource of the ``Registration``.
Must be in the format ``projects/*/locations/*``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
registration (:class:`google.cloud.domains_v1.types.Registration`):
Required. The complete ``Registration`` resource to be
created.
This corresponds to the ``registration`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
yearly_price (:class:`google.type.money_pb2.Money`):
Required. Yearly price to register or
renew the domain. The value that should
be put here can be obtained from
RetrieveRegisterParameters or
SearchDomains calls.
This corresponds to the ``yearly_price`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name
registrations.
There are several ways to create a new Registration
resource:
To create a new Registration resource, find a
suitable domain name by calling the SearchDomains
method with a query to see available domain name
options. After choosing a name, call
RetrieveRegisterParameters to ensure availability and
obtain information like pricing, which is needed to
build a call to RegisterDomain.
Another way to create a new Registration is to
transfer an existing domain from another registrar.
First, go to the current registrar to unlock the
domain for transfer and retrieve the domain's
transfer authorization code. Then call
RetrieveTransferParameters to confirm that the domain
is unlocked and to get values needed to build a call
to TransferDomain.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, registration, yearly_price])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.RegisterDomainRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if registration is not None:
request.registration = registration
if yearly_price is not None:
request.yearly_price = yearly_price
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.register_domain,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
domains.Registration,
metadata_type=domains.OperationMetadata,
)
# Done; return the response.
return response
async def retrieve_transfer_parameters(
self,
request: Union[domains.RetrieveTransferParametersRequest, dict] = None,
*,
location: str = None,
domain_name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> domains.RetrieveTransferParametersResponse:
r"""Gets parameters needed to transfer a domain name from another
registrar to Cloud Domains. For domains managed by Google
Domains, transferring to Cloud Domains is not supported.
Use the returned values to call ``TransferDomain``.
Args:
request (Union[google.cloud.domains_v1.types.RetrieveTransferParametersRequest, dict]):
The request object. Request for the
`RetrieveTransferParameters` method.
location (:class:`str`):
Required. The location. Must be in the format
``projects/*/locations/*``.
This corresponds to the ``location`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
domain_name (:class:`str`):
Required. The domain name. Unicode
domain names must be expressed in
Punycode format.
This corresponds to the ``domain_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.domains_v1.types.RetrieveTransferParametersResponse:
Response for the RetrieveTransferParameters method.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([location, domain_name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.RetrieveTransferParametersRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if location is not None:
request.location = location
if domain_name is not None:
request.domain_name = domain_name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.retrieve_transfer_parameters,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("location", request.location),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def transfer_domain(
self,
request: Union[domains.TransferDomainRequest, dict] = None,
*,
parent: str = None,
registration: domains.Registration = None,
yearly_price: money_pb2.Money = None,
authorization_code: domains.AuthorizationCode = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Transfers a domain name from another registrar to Cloud Domains.
For domains managed by Google Domains, transferring to Cloud
Domains is not supported.
Before calling this method, go to the domain's current registrar
to unlock the domain for transfer and retrieve the domain's
transfer authorization code. Then call
``RetrieveTransferParameters`` to confirm that the domain is
unlocked and to get values needed to build a call to this
method.
A successful call creates a ``Registration`` resource in state
``TRANSFER_PENDING``. It can take several days to complete the
transfer process. The registrant can often speed up this process
by approving the transfer through the current registrar, either
by clicking a link in an email from the registrar or by visiting
the registrar's website.
A few minutes after transfer approval, the resource transitions
to state ``ACTIVE``, indicating that the transfer was
successful. If the transfer is rejected or the request expires
without being approved, the resource can end up in state
``TRANSFER_FAILED``. If transfer fails, you can safely delete
the resource and retry the transfer.
Args:
request (Union[google.cloud.domains_v1.types.TransferDomainRequest, dict]):
The request object. Request for the `TransferDomain`
method.
parent (:class:`str`):
Required. The parent resource of the ``Registration``.
Must be in the format ``projects/*/locations/*``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
registration (:class:`google.cloud.domains_v1.types.Registration`):
Required. The complete ``Registration`` resource to be
created.
You can leave ``registration.dns_settings`` unset to
import the domain's current DNS configuration from its
current registrar. Use this option only if you are sure
that the domain's current DNS service does not cease
upon transfer, as is often the case for DNS services
provided for free by the registrar.
This corresponds to the ``registration`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
yearly_price (:class:`google.type.money_pb2.Money`):
Required. Acknowledgement of the price to transfer or
renew the domain for one year. Call
``RetrieveTransferParameters`` to obtain the price,
which you must acknowledge.
This corresponds to the ``yearly_price`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
authorization_code (:class:`google.cloud.domains_v1.types.AuthorizationCode`):
The domain's transfer authorization
code. You can obtain this from the
domain's current registrar.
This corresponds to the ``authorization_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name
registrations.
There are several ways to create a new Registration
resource:
To create a new Registration resource, find a
suitable domain name by calling the SearchDomains
method with a query to see available domain name
options. After choosing a name, call
RetrieveRegisterParameters to ensure availability and
obtain information like pricing, which is needed to
build a call to RegisterDomain.
Another way to create a new Registration is to
transfer an existing domain from another registrar.
First, go to the current registrar to unlock the
domain for transfer and retrieve the domain's
transfer authorization code. Then call
RetrieveTransferParameters to confirm that the domain
is unlocked and to get values needed to build a call
to TransferDomain.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any(
[parent, registration, yearly_price, authorization_code]
)
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.TransferDomainRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if registration is not None:
request.registration = registration
if yearly_price is not None:
request.yearly_price = yearly_price
if authorization_code is not None:
request.authorization_code = authorization_code
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.transfer_domain,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
domains.Registration,
metadata_type=domains.OperationMetadata,
)
# Done; return the response.
return response
async def list_registrations(
self,
request: Union[domains.ListRegistrationsRequest, dict] = None,
*,
parent: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListRegistrationsAsyncPager:
r"""Lists the ``Registration`` resources in a project.
Args:
request (Union[google.cloud.domains_v1.types.ListRegistrationsRequest, dict]):
The request object. Request for the `ListRegistrations`
method.
parent (:class:`str`):
Required. The project and location from which to list
``Registration``\ s, specified in the format
``projects/*/locations/*``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.domains_v1.services.domains.pagers.ListRegistrationsAsyncPager:
Response for the ListRegistrations method.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.ListRegistrationsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_registrations,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListRegistrationsAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def get_registration(
self,
request: Union[domains.GetRegistrationRequest, dict] = None,
*,
name: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> domains.Registration:
r"""Gets the details of a ``Registration`` resource.
Args:
request (Union[google.cloud.domains_v1.types.GetRegistrationRequest, dict]):
The request object. Request for the `GetRegistration`
method.
name (:class:`str`):
Required. The name of the ``Registration`` to get, in
the format ``projects/*/locations/*/registrations/*``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.domains_v1.types.Registration:
The Registration resource facilitates managing and configuring domain name
registrations.
There are several ways to create a new Registration
resource:
To create a new Registration resource, find a
suitable domain name by calling the SearchDomains
method with a query to see available domain name
options. After choosing a name, call
RetrieveRegisterParameters to ensure availability and
obtain information like pricing, which is needed to
build a call to RegisterDomain.
Another way to create a new Registration is to
transfer an existing domain from another registrar.
First, go to the current registrar to unlock the
domain for transfer and retrieve the domain's
transfer authorization code. Then call
RetrieveTransferParameters to confirm that the domain
is unlocked and to get values needed to build a call
to TransferDomain.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.GetRegistrationRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_registration,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def update_registration(
self,
request: Union[domains.UpdateRegistrationRequest, dict] = None,
*,
registration: domains.Registration = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates select fields of a ``Registration`` resource, notably
``labels``. To update other fields, use the appropriate custom
update method:
- To update management settings, see
``ConfigureManagementSettings``
- To update DNS configuration, see ``ConfigureDnsSettings``
- To update contact information, see
``ConfigureContactSettings``
Args:
request (Union[google.cloud.domains_v1.types.UpdateRegistrationRequest, dict]):
The request object. Request for the `UpdateRegistration`
method.
registration (:class:`google.cloud.domains_v1.types.Registration`):
Fields of the ``Registration`` to update.
This corresponds to the ``registration`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Required. The field mask describing which fields to
update as a comma-separated list. For example, if only
the labels are being updated, the ``update_mask`` is
``"labels"``.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name
registrations.
There are several ways to create a new Registration
resource:
To create a new Registration resource, find a
suitable domain name by calling the SearchDomains
method with a query to see available domain name
options. After choosing a name, call
RetrieveRegisterParameters to ensure availability and
obtain information like pricing, which is needed to
build a call to RegisterDomain.
Another way to create a new Registration is to
transfer an existing domain from another registrar.
First, go to the current registrar to unlock the
domain for transfer and retrieve the domain's
transfer authorization code. Then call
RetrieveTransferParameters to confirm that the domain
is unlocked and to get values needed to build a call
to TransferDomain.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([registration, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.UpdateRegistrationRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if registration is not None:
request.registration = registration
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_registration,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("registration.name", request.registration.name),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
domains.Registration,
metadata_type=domains.OperationMetadata,
)
# Done; return the response.
return response
async def configure_management_settings(
self,
request: Union[domains.ConfigureManagementSettingsRequest, dict] = None,
*,
registration: str = None,
management_settings: domains.ManagementSettings = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates a ``Registration``'s management settings.
Args:
request (Union[google.cloud.domains_v1.types.ConfigureManagementSettingsRequest, dict]):
The request object. Request for the
`ConfigureManagementSettings` method.
registration (:class:`str`):
Required. The name of the ``Registration`` whose
management settings are being updated, in the format
``projects/*/locations/*/registrations/*``.
This corresponds to the ``registration`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
management_settings (:class:`google.cloud.domains_v1.types.ManagementSettings`):
Fields of the ``ManagementSettings`` to update.
This corresponds to the ``management_settings`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Required. The field mask describing which fields to
update as a comma-separated list. For example, if only
the transfer lock is being updated, the ``update_mask``
is ``"transfer_lock_state"``.
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name
registrations.
There are several ways to create a new Registration
resource:
To create a new Registration resource, find a
suitable domain name by calling the SearchDomains
method with a query to see available domain name
options. After choosing a name, call
RetrieveRegisterParameters to ensure availability and
obtain information like pricing, which is needed to
build a call to RegisterDomain.
Another way to create a new Registration is to
transfer an existing domain from another registrar.
First, go to the current registrar to unlock the
domain for transfer and retrieve the domain's
transfer authorization code. Then call
RetrieveTransferParameters to confirm that the domain
is unlocked and to get values needed to build a call
to TransferDomain.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([registration, management_settings, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.ConfigureManagementSettingsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if registration is not None:
request.registration = registration
if management_settings is not None:
request.management_settings = management_settings
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.configure_management_settings,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("registration", request.registration),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
domains.Registration,
metadata_type=domains.OperationMetadata,
)
# Done; return the response.
return response
async def configure_dns_settings(
self,
request: Union[domains.ConfigureDnsSettingsRequest, dict] = None,
*,
registration: str = None,
dns_settings: domains.DnsSettings = None,
update_mask: field_mask_pb2.FieldMask = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates a ``Registration``'s DNS settings.
Args:
request (Union[google.cloud.domains_v1.types.ConfigureDnsSettingsRequest, dict]):
The request object. Request for the
`ConfigureDnsSettings` method.
registration (:class:`str`):
Required. The name of the ``Registration`` whose DNS
settings are being updated, in the format
``projects/*/locations/*/registrations/*``.
This corresponds to the ``registration`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
dns_settings (:class:`google.cloud.domains_v1.types.DnsSettings`):
Fields of the ``DnsSettings`` to update.
This corresponds to the ``dns_settings`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Required. The field mask describing which fields to
update as a comma-separated list. For example, if only
the name servers are being updated for an existing
Custom DNS configuration, the ``update_mask`` is
``"custom_dns.name_servers"``.
When changing the DNS provider from one type to another,
pass the new provider's field name as part of the field
mask. For example, when changing from a Google Domains
DNS configuration to a Custom DNS configuration, the
``update_mask`` is ``"custom_dns"``. //
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.cloud.domains_v1.types.Registration` The Registration resource facilitates managing and configuring domain name
registrations.
There are several ways to create a new Registration
resource:
To create a new Registration resource, find a
suitable domain name by calling the SearchDomains
method with a query to see available domain name
options. After choosing a name, call
RetrieveRegisterParameters to ensure availability and
obtain information like pricing, which is needed to
build a call to RegisterDomain.
Another way to create a new Registration is to
transfer an existing domain from another registrar.
First, go to the current registrar to unlock the
domain for transfer and retrieve the domain's
transfer authorization code. Then call
RetrieveTransferParameters to confirm that the domain
is unlocked and to get values needed to build a call
to TransferDomain.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([registration, dns_settings, update_mask])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.ConfigureDnsSettingsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if registration is not None:
request.registration = registration
if dns_settings is not None:
request.dns_settings = dns_settings
if update_mask is not None:
request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.configure_dns_settings,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("registration", request.registration),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
domains.Registration,
metadata_type=domains.OperationMetadata,
)
# Done; return the response.
return response
    async def configure_contact_settings(
        self,
        request: Union[domains.ConfigureContactSettingsRequest, dict, None] = None,
        *,
        registration: Union[str, None] = None,
        contact_settings: Union[domains.ContactSettings, None] = None,
        update_mask: Union[field_mask_pb2.FieldMask, None] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, None] = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Updates a ``Registration``'s contact settings. Some changes
        require confirmation by the domain's registrant contact.

        Args:
            request (Union[google.cloud.domains_v1.types.ConfigureContactSettingsRequest, dict]):
                The request object. Request for the
                ``ConfigureContactSettings`` method.
            registration (:class:`str`):
                Required. The name of the ``Registration`` whose contact
                settings are being updated, in the format
                ``projects/*/locations/*/registrations/*``.
                This corresponds to the ``registration`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            contact_settings (:class:`google.cloud.domains_v1.types.ContactSettings`):
                Fields of the ``ContactSettings`` to update.
                This corresponds to the ``contact_settings`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
                Required. The field mask describing which fields to
                update as a comma-separated list. For example, if only
                the registrant contact is being updated, the
                ``update_mask`` is ``"registrant_contact"``.
                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.domains_v1.types.Registration`. The
                Registration resource facilitates managing and configuring
                domain name registrations.
                There are several ways to create a new Registration
                resource:
                To create a new Registration resource, find a
                suitable domain name by calling the SearchDomains
                method with a query to see available domain name
                options. After choosing a name, call
                RetrieveRegisterParameters to ensure availability and
                obtain information like pricing, which is needed to
                build a call to RegisterDomain.
                Another way to create a new Registration is to
                transfer an existing domain from another registrar.
                First, go to the current registrar to unlock the
                domain for transfer and retrieve the domain's
                transfer authorization code. Then call
                RetrieveTransferParameters to confirm that the domain
                is unlocked and to get values needed to build a call
                to TransferDomain.

        Raises:
            ValueError: If ``request`` is combined with any flattened
                field argument.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([registration, contact_settings, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coercing also handles the ``dict`` form of the request.
        request = domains.ConfigureContactSettingsRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if registration is not None:
            request.registration = registration
        if contact_settings is not None:
            request.contact_settings = contact_settings
        if update_mask is not None:
            request.update_mask = update_mask
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.configure_contact_settings,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.  The registration name is used for routing.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("registration", request.registration),)
            ),
        )
        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            domains.Registration,
            metadata_type=domains.OperationMetadata,
        )
        # Done; return the response.
        return response
    async def export_registration(
        self,
        request: Union[domains.ExportRegistrationRequest, dict, None] = None,
        *,
        name: Union[str, None] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, None] = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Exports a ``Registration`` resource, such that it is no longer
        managed by Cloud Domains.

        When an active domain is successfully exported, you can continue
        to use the domain in `Google
        Domains <https://domains.google/>`__ until it expires. The
        calling user becomes the domain's sole owner in Google Domains,
        and permissions for the domain are subsequently managed there.
        The domain does not renew automatically unless the new owner
        sets up billing in Google Domains.

        Args:
            request (Union[google.cloud.domains_v1.types.ExportRegistrationRequest, dict]):
                The request object. Request for the ``ExportRegistration``
                method.
            name (:class:`str`):
                Required. The name of the ``Registration`` to export, in
                the format ``projects/*/locations/*/registrations/*``.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.domains_v1.types.Registration`. The
                Registration resource facilitates managing and configuring
                domain name registrations.
                There are several ways to create a new Registration
                resource:
                To create a new Registration resource, find a
                suitable domain name by calling the SearchDomains
                method with a query to see available domain name
                options. After choosing a name, call
                RetrieveRegisterParameters to ensure availability and
                obtain information like pricing, which is needed to
                build a call to RegisterDomain.
                Another way to create a new Registration is to
                transfer an existing domain from another registrar.
                First, go to the current registrar to unlock the
                domain for transfer and retrieve the domain's
                transfer authorization code. Then call
                RetrieveTransferParameters to confirm that the domain
                is unlocked and to get values needed to build a call
                to TransferDomain.

        Raises:
            ValueError: If ``request`` is combined with the flattened
                ``name`` argument.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coercing also handles the ``dict`` form of the request.
        request = domains.ExportRegistrationRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.export_registration,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.  The resource name is used for routing.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )
        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            domains.Registration,
            metadata_type=domains.OperationMetadata,
        )
        # Done; return the response.
        return response
    async def delete_registration(
        self,
        request: Union[domains.DeleteRegistrationRequest, dict, None] = None,
        *,
        name: Union[str, None] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, None] = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Deletes a ``Registration`` resource.

        This method works on any ``Registration`` resource using
        `Subscription or Commitment
        billing </domains/pricing#billing-models>`__, provided that the
        resource was created at least 1 day in the past.

        For ``Registration`` resources using `Monthly
        billing </domains/pricing#billing-models>`__, this method works
        if:

        - ``state`` is ``EXPORTED`` with ``expire_time`` in the past
        - ``state`` is ``REGISTRATION_FAILED``
        - ``state`` is ``TRANSFER_FAILED``

        When an active registration is successfully deleted, you can
        continue to use the domain in `Google
        Domains <https://domains.google/>`__ until it expires. The
        calling user becomes the domain's sole owner in Google Domains,
        and permissions for the domain are subsequently managed there.
        The domain does not renew automatically unless the new owner
        sets up billing in Google Domains.

        Args:
            request (Union[google.cloud.domains_v1.types.DeleteRegistrationRequest, dict]):
                The request object. Request for the ``DeleteRegistration``
                method.
            name (:class:`str`):
                Required. The name of the ``Registration`` to delete, in
                the format ``projects/*/locations/*/registrations/*``.
                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.protobuf.empty_pb2.Empty`. A generic empty
                message that you can re-use to avoid defining duplicated
                empty messages in your APIs. A typical example is to
                use it as the request or the response type of an API
                method. For instance:

                    service Foo {
                        rpc Bar(google.protobuf.Empty) returns
                        (google.protobuf.Empty);
                    }

                The JSON representation for Empty is empty JSON
                object {}.

        Raises:
            ValueError: If ``request`` is combined with the flattened
                ``name`` argument.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )
        # Coercing also handles the ``dict`` form of the request.
        request = domains.DeleteRegistrationRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name
        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.delete_registration,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )
        # Certain fields should be provided within the metadata header;
        # add these here.  The resource name is used for routing.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )
        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
        # Wrap the response in an operation future.  The operation resolves
        # to Empty because a successful delete has no payload.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            empty_pb2.Empty,
            metadata_type=domains.OperationMetadata,
        )
        # Done; return the response.
        return response
async def retrieve_authorization_code(
self,
request: Union[domains.RetrieveAuthorizationCodeRequest, dict] = None,
*,
registration: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> domains.AuthorizationCode:
r"""Gets the authorization code of the ``Registration`` for the
purpose of transferring the domain to another registrar.
You can call this method only after 60 days have elapsed since
the initial domain registration.
Args:
request (Union[google.cloud.domains_v1.types.RetrieveAuthorizationCodeRequest, dict]):
The request object. Request for the
`RetrieveAuthorizationCode` method.
registration (:class:`str`):
Required. The name of the ``Registration`` whose
authorization code is being retrieved, in the format
``projects/*/locations/*/registrations/*``.
This corresponds to the ``registration`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.domains_v1.types.AuthorizationCode:
Defines an authorization code.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([registration])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.RetrieveAuthorizationCodeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if registration is not None:
request.registration = registration
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.retrieve_authorization_code,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("registration", request.registration),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def reset_authorization_code(
self,
request: Union[domains.ResetAuthorizationCodeRequest, dict] = None,
*,
registration: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> domains.AuthorizationCode:
r"""Resets the authorization code of the ``Registration`` to a new
random string.
You can call this method only after 60 days have elapsed since
the initial domain registration.
Args:
request (Union[google.cloud.domains_v1.types.ResetAuthorizationCodeRequest, dict]):
The request object. Request for the
`ResetAuthorizationCode` method.
registration (:class:`str`):
Required. The name of the ``Registration`` whose
authorization code is being reset, in the format
``projects/*/locations/*/registrations/*``.
This corresponds to the ``registration`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.domains_v1.types.AuthorizationCode:
Defines an authorization code.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([registration])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = domains.ResetAuthorizationCodeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if registration is not None:
request.registration = registration
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.reset_authorization_code,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("registration", request.registration),)
),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
    async def __aenter__(self):
        """Enter the async context manager, returning the client itself."""
        return self
    async def __aexit__(self, exc_type, exc, tb):
        """Exit the async context manager by closing the underlying transport."""
        await self.transport.close()
# Client telemetry: report the installed google-cloud-domains version in
# request headers when the distribution metadata is available.
# NOTE(review): pkg_resources is deprecated in modern setuptools; newer
# generator output uses importlib.metadata instead — consider migrating.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-cloud-domains",).version,
    )
except pkg_resources.DistributionNotFound:
    # Running from source without an installed distribution: fall back to an
    # unversioned ClientInfo rather than failing at import time.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
# Public API of this module.
__all__ = ("DomainsAsyncClient",)
| 43.946733
| 184
| 0.621178
| 8,774
| 80,027
| 5.57602
| 0.065079
| 0.024937
| 0.011038
| 0.015534
| 0.792004
| 0.755539
| 0.737491
| 0.717378
| 0.701108
| 0.694915
| 0
| 0.002428
| 0.315456
| 80,027
| 1,820
| 185
| 43.970879
| 0.890639
| 0.148812
| 0
| 0.611635
| 0
| 0
| 0.054617
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007862
| false
| 0
| 0.034591
| 0
| 0.102201
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
60563acedface0a1fa5a451f238ecfa5f1f58695
| 100
|
py
|
Python
|
mobilecoind/clients/python/lib/mobilecoin/__init__.py
|
iamalwaysuncomfortable/mobilecoin
|
9d7f88efe181ee498c07f62767ff56aed18a032c
|
[
"Apache-2.0"
] | 140
|
2020-04-15T17:51:12.000Z
|
2020-10-02T19:51:57.000Z
|
mobilecoind/clients/python/lib/mobilecoin/__init__.py
|
iamalwaysuncomfortable/mobilecoin
|
9d7f88efe181ee498c07f62767ff56aed18a032c
|
[
"Apache-2.0"
] | 160
|
2021-09-22T06:21:20.000Z
|
2022-03-28T06:29:06.000Z
|
mobilecoind/clients/python/lib/mobilecoin/__init__.py
|
voloshyn/mobilecoin
|
3fac7a8861aa9e1d8058ddf73860dea1c6db35e0
|
[
"Apache-2.0"
] | 32
|
2020-04-15T18:17:07.000Z
|
2020-10-19T23:25:42.000Z
|
# Copyright (c) 2018-2022 The MobileCoin Foundation
from .client import *
from .utilities import *
| 20
| 51
| 0.76
| 13
| 100
| 5.846154
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 0.16
| 100
| 4
| 52
| 25
| 0.809524
| 0.49
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
60748dad8eaba209e8ee1a0ebd63a767ae9b0499
| 134
|
py
|
Python
|
Lidar_DLG/src/DensityOfProjectedPoints/__init__.py
|
Anonymous772066235/GraduationDesignProgram
|
3337c48005def8515a4660fcaa004dcfbe4766be
|
[
"MIT"
] | null | null | null |
Lidar_DLG/src/DensityOfProjectedPoints/__init__.py
|
Anonymous772066235/GraduationDesignProgram
|
3337c48005def8515a4660fcaa004dcfbe4766be
|
[
"MIT"
] | null | null | null |
Lidar_DLG/src/DensityOfProjectedPoints/__init__.py
|
Anonymous772066235/GraduationDesignProgram
|
3337c48005def8515a4660fcaa004dcfbe4766be
|
[
"MIT"
] | null | null | null |
# File :__inti__.py
# Author :WJ
# Function :
# Time :2021/03/13
# Version :
# Amend :
from .DoPP import run
| 19.142857
| 25
| 0.529851
| 16
| 134
| 4.1875
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091954
| 0.350746
| 134
| 7
| 26
| 19.142857
| 0.678161
| 0.701493
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
608f08902134159d8cd7d0dd6e942be7f69aee93
| 28
|
py
|
Python
|
test/reference_manager_test.py
|
dkalaxdk/IssueTableGenerator
|
a2999e3417c2514292b1197d8cfe83435f6f1b8d
|
[
"MIT"
] | null | null | null |
test/reference_manager_test.py
|
dkalaxdk/IssueTableGenerator
|
a2999e3417c2514292b1197d8cfe83435f6f1b8d
|
[
"MIT"
] | null | null | null |
test/reference_manager_test.py
|
dkalaxdk/IssueTableGenerator
|
a2999e3417c2514292b1197d8cfe83435f6f1b8d
|
[
"MIT"
] | null | null | null |
#def test_solved_by_finder
| 9.333333
| 26
| 0.857143
| 5
| 28
| 4.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 28
| 3
| 26
| 9.333333
| 0.84
| 0.892857
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
60b7df6ee83b0291f7c319e9b930ac9c5066ccac
| 336
|
py
|
Python
|
src/Neighbor/ConnectivityMapInst.cc.py
|
markguozhiming/spheral
|
bbb982102e61edb8a1d00cf780bfa571835e1b61
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 1
|
2020-10-21T01:56:55.000Z
|
2020-10-21T01:56:55.000Z
|
src/Neighbor/ConnectivityMapInst.cc.py
|
markguozhiming/spheral
|
bbb982102e61edb8a1d00cf780bfa571835e1b61
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | null | null | null |
src/Neighbor/ConnectivityMapInst.cc.py
|
markguozhiming/spheral
|
bbb982102e61edb8a1d00cf780bfa571835e1b61
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | null | null | null |
text = """
//------------------------------------------------------------------------------
// Explicit instantiation.
//------------------------------------------------------------------------------
#include "ConnectivityMap.cc"
#include "Geometry/Dimension.hh"
template class Spheral::ConnectivityMap<Spheral::Dim< %(ndim)s > >;
"""
| 33.6
| 80
| 0.372024
| 18
| 336
| 6.944444
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059524
| 336
| 9
| 81
| 37.333333
| 0.39557
| 0
| 0
| 0.25
| 0
| 0
| 0.958333
| 0.657738
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
60c3c27e35e3af1e06879047be6837c6edfc3633
| 238
|
py
|
Python
|
mmdet3d/models/necks/__init__.py
|
chetanmreddy/imvoxelnet
|
10dd35a96539af7b147be4bb03b0395cc164177e
|
[
"MIT"
] | 1
|
2022-03-11T11:05:35.000Z
|
2022-03-11T11:05:35.000Z
|
mmdet3d/models/necks/__init__.py
|
chetanmreddy/imvoxelnet
|
10dd35a96539af7b147be4bb03b0395cc164177e
|
[
"MIT"
] | null | null | null |
mmdet3d/models/necks/__init__.py
|
chetanmreddy/imvoxelnet
|
10dd35a96539af7b147be4bb03b0395cc164177e
|
[
"MIT"
] | null | null | null |
from mmdet.models.necks.fpn import FPN
from .second_fpn import SECONDFPN
from .imvoxelnet import ImVoxelNeck, KittiImVoxelNeck, NuScenesImVoxelNeck
__all__ = ['FPN', 'SECONDFPN', 'ImVoxelNeck', 'KittiImVoxelNeck', 'NuScenesImVoxelNeck']
| 39.666667
| 88
| 0.806723
| 24
| 238
| 7.791667
| 0.541667
| 0.096257
| 0.491979
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092437
| 238
| 5
| 89
| 47.6
| 0.865741
| 0
| 0
| 0
| 0
| 0
| 0.243697
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
60e31a7c8664da98ca57a8e0f4fe4197795620b2
| 70
|
py
|
Python
|
hello.py
|
Gustavobrosa/UdemyAutomate
|
0c8b8e19e9564439cd2a8d3ceea668bfbe5df648
|
[
"MIT"
] | null | null | null |
hello.py
|
Gustavobrosa/UdemyAutomate
|
0c8b8e19e9564439cd2a8d3ceea668bfbe5df648
|
[
"MIT"
] | null | null | null |
hello.py
|
Gustavobrosa/UdemyAutomate
|
0c8b8e19e9564439cd2a8d3ceea668bfbe5df648
|
[
"MIT"
] | null | null | null |
#! /usr/bin/python3
import sys
print ('Hello World!')
print (sys.argv)
| 17.5
| 22
| 0.7
| 11
| 70
| 4.454545
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016129
| 0.114286
| 70
| 4
| 23
| 17.5
| 0.774194
| 0.257143
| 0
| 0
| 0
| 0
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
880c3ef477b89a0bc7f97e9f27c6258324396aeb
| 209
|
py
|
Python
|
python/check_time.py
|
Shail-Shouryya/automate_YouTube-Channel-Videos-List
|
b63bbebb7caacc5e99ebf5dc95387d505069953d
|
[
"Apache-2.0"
] | 26
|
2021-01-31T11:52:10.000Z
|
2021-08-01T17:24:55.000Z
|
python/check_time.py
|
Shail-Shouryya/automate_YouTube-Channel-Videos-List
|
b63bbebb7caacc5e99ebf5dc95387d505069953d
|
[
"Apache-2.0"
] | 7
|
2020-06-01T13:14:15.000Z
|
2021-01-09T20:58:17.000Z
|
python/check_time.py
|
Shail-Shouryya/automate_YouTube-Channel-Videos-List
|
b63bbebb7caacc5e99ebf5dc95387d505069953d
|
[
"Apache-2.0"
] | 6
|
2021-03-18T05:46:51.000Z
|
2021-07-19T07:40:37.000Z
|
import time
def check_import_time():
start = time.time()
from yt_videos_list import ListCreator
end = time.time()
print(f'Took {end-start} seconds to import ListCreator')
check_import_time()
| 20.9
| 60
| 0.717703
| 30
| 209
| 4.8
| 0.533333
| 0.208333
| 0.208333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186603
| 209
| 9
| 61
| 23.222222
| 0.847059
| 0
| 0
| 0
| 0
| 0
| 0.220096
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.714286
| 0
| 0.857143
| 0.142857
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
880c61923ea374e35acca5452e0f0d909ae8113f
| 23
|
py
|
Python
|
gid/__init__.py
|
kodemore/gid
|
22adb7853c4fac0ac4af4c51bba9c31e225cc9ac
|
[
"MIT"
] | null | null | null |
gid/__init__.py
|
kodemore/gid
|
22adb7853c4fac0ac4af4c51bba9c31e225cc9ac
|
[
"MIT"
] | null | null | null |
gid/__init__.py
|
kodemore/gid
|
22adb7853c4fac0ac4af4c51bba9c31e225cc9ac
|
[
"MIT"
] | null | null | null |
from .guid import Guid
| 11.5
| 22
| 0.782609
| 4
| 23
| 4.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
717d76b577e6784be997a01abf7f5f6cd7080da4
| 17
|
py
|
Python
|
src/typhoonae/tests/sample/__init__.py
|
sprymak/typhoonae
|
fe31bcc7b21fc14f8aa97b36d66cd7671974543b
|
[
"Apache-2.0"
] | 4
|
2016-11-10T21:43:01.000Z
|
2017-02-24T21:36:45.000Z
|
src/typhoonae/tests/sample/__init__.py
|
sprymak/typhoonae
|
fe31bcc7b21fc14f8aa97b36d66cd7671974543b
|
[
"Apache-2.0"
] | 1
|
2019-04-26T10:48:34.000Z
|
2019-05-18T15:59:35.000Z
|
src/typhoonae/tests/sample/__init__.py
|
sprymak/typhoonae
|
fe31bcc7b21fc14f8aa97b36d66cd7671974543b
|
[
"Apache-2.0"
] | 8
|
2016-11-09T22:25:14.000Z
|
2019-04-26T19:53:37.000Z
|
# Python package
| 8.5
| 16
| 0.764706
| 2
| 17
| 6.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 17
| 1
| 17
| 17
| 0.928571
| 0.823529
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
71a45270545bb515d01ba81169622f6f734176be
| 212
|
py
|
Python
|
youtube_v3_api/__init__.py
|
360modder/current-music-trends
|
47b4ac9eef63bdf7786600f75479498df0e72f5b
|
[
"MIT"
] | null | null | null |
youtube_v3_api/__init__.py
|
360modder/current-music-trends
|
47b4ac9eef63bdf7786600f75479498df0e72f5b
|
[
"MIT"
] | null | null | null |
youtube_v3_api/__init__.py
|
360modder/current-music-trends
|
47b4ac9eef63bdf7786600f75479498df0e72f5b
|
[
"MIT"
] | null | null | null |
from .service import Service, YoutubeService
from .videos import Video, Videos
from .playlists import Playlists, ResurrectPlaylist
from .create import Create
from .utils import get_response_item_ids, get_regions
| 42.4
| 53
| 0.84434
| 28
| 212
| 6.25
| 0.535714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113208
| 212
| 5
| 53
| 42.4
| 0.930851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
71ab1d923831a6be0f40501da4e21a93d34ca6ba
| 322
|
py
|
Python
|
dobby/views.py
|
CaesiumY/house-elf-dobby
|
f58551db82cfe229f25502a7f4b00508923e773f
|
[
"MIT"
] | null | null | null |
dobby/views.py
|
CaesiumY/house-elf-dobby
|
f58551db82cfe229f25502a7f4b00508923e773f
|
[
"MIT"
] | 3
|
2021-03-19T00:36:15.000Z
|
2021-06-10T18:40:57.000Z
|
dobby/views.py
|
CaesiumY/house-elf-dobby
|
f58551db82cfe229f25502a7f4b00508923e773f
|
[
"MIT"
] | 1
|
2019-08-09T19:51:55.000Z
|
2019-08-09T19:51:55.000Z
|
from django.shortcuts import render
# Create your views here.
def home(request):
return render(request, 'home.html')
def signup(request):
return render(request, 'SignupUser.html')
def userroom(request):
return render(request, 'userroom.html')
def error(request):
return render(request, 'error.html')
| 18.941176
| 45
| 0.720497
| 41
| 322
| 5.658537
| 0.439024
| 0.224138
| 0.327586
| 0.448276
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158385
| 322
| 17
| 46
| 18.941176
| 0.856089
| 0.071429
| 0
| 0
| 0
| 0
| 0.157718
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0
| 0.111111
| 0.444444
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
71ac6ddc912f0ea3e3a031b43106fac9d01fba68
| 1,852
|
py
|
Python
|
063_3d-bounding-box-estimation-for-autonomous-driving/01_float32/13_integer_quantization.py
|
IgiArdiyanto/PINTO_model_zoo
|
9247b56a7dff37f28a8a7822a7ef4dd9adf7234d
|
[
"MIT"
] | 1,529
|
2019-12-11T13:36:23.000Z
|
2022-03-31T18:38:27.000Z
|
063_3d-bounding-box-estimation-for-autonomous-driving/01_float32/13_integer_quantization.py
|
IgiArdiyanto/PINTO_model_zoo
|
9247b56a7dff37f28a8a7822a7ef4dd9adf7234d
|
[
"MIT"
] | 200
|
2020-01-06T09:24:42.000Z
|
2022-03-31T17:29:08.000Z
|
063_3d-bounding-box-estimation-for-autonomous-driving/01_float32/13_integer_quantization.py
|
IgiArdiyanto/PINTO_model_zoo
|
9247b56a7dff37f28a8a7822a7ef4dd9adf7234d
|
[
"MIT"
] | 288
|
2020-02-21T14:56:02.000Z
|
2022-03-30T03:00:35.000Z
|
### tensorflow==2.3.1
import tensorflow as tf
import tensorflow_datasets as tfds
import numpy as np
def representative_dataset_gen_256x256():
for data in raw_test_data.take(10):
image = data['image'].numpy()
image = tf.image.resize(image, (256, 256))
image = image[np.newaxis,:,:,:]
image = image - 127.5
image = image * 0.007843
yield [image]
def representative_dataset_gen_320x320():
    """Yield 100 calibration samples resized to 320x320.

    Same normalization as the 256x256 generator: shift by -127.5 then
    scale by 0.007843 (~1/127.5).
    """
    target_size = (320, 320)
    for sample in raw_test_data.take(100):
        img = sample['image'].numpy()
        img = tf.image.resize(img, target_size)
        img = img[np.newaxis, :, :, :]
        img = img - 127.5
        img = img * 0.007843
        yield [img]
raw_test_data, info = tfds.load(name="coco/2017", with_info=True, split="test", data_dir="~/TFDS", download=False)
# Integer Quantization - Input/Output=float32
# Run the same conversion for both model resolutions, each with its own
# matching calibration generator.
for height, width, rep_dataset in (
        (256, 256, representative_dataset_gen_256x256),
        (320, 320, representative_dataset_gen_320x320)):
    converter = tf.lite.TFLiteConverter.from_saved_model('saved_model_{}x{}'.format(height, width))
    converter.optimizations = [tf.lite.Optimize.DEFAULT]
    converter.representative_dataset = rep_dataset
    tflite_model = converter.convert()
    with open('3dbox_mbnv2_{}x{}_integer_quant.tflite'.format(height, width), 'wb') as w:
        w.write(tflite_model)
    print('Integer Quantization complete! - 3dbox_mbnv2_{}x{}_integer_quant.tflite'.format(height, width))
| 37.795918
| 114
| 0.718683
| 247
| 1,852
| 5.182186
| 0.303644
| 0.098438
| 0.079688
| 0.05625
| 0.74375
| 0.74375
| 0.74375
| 0.70625
| 0.70625
| 0.640625
| 0
| 0.058191
| 0.146328
| 1,852
| 48
| 115
| 38.583333
| 0.751423
| 0.032937
| 0
| 0.578947
| 0
| 0
| 0.159574
| 0.085106
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.078947
| 0
| 0.131579
| 0.052632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
71c7d44a9e529b361915190ad0ba9a3f9a90535f
| 158
|
py
|
Python
|
stardist_napari/__init__.py
|
tlambert03/stardist-napari
|
a8c8aa86c7472a2a943eec923c5644ac634922be
|
[
"BSD-3-Clause"
] | null | null | null |
stardist_napari/__init__.py
|
tlambert03/stardist-napari
|
a8c8aa86c7472a2a943eec923c5644ac634922be
|
[
"BSD-3-Clause"
] | null | null | null |
stardist_napari/__init__.py
|
tlambert03/stardist-napari
|
a8c8aa86c7472a2a943eec923c5644ac634922be
|
[
"BSD-3-Clause"
] | null | null | null |
from ._version import __version__
# from ._dock_widget import napari_experimental_provide_dock_widget
# from ._sample_data import napari_provide_sample_data
| 31.6
| 67
| 0.873418
| 21
| 158
| 5.809524
| 0.47619
| 0.163934
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094937
| 158
| 4
| 68
| 39.5
| 0.853147
| 0.746835
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e0981db4fa2a8ac77990c97267884866a94d2615
| 199
|
py
|
Python
|
examples/python_tests/recsys2021/init.py
|
XinyaoWa/recdp
|
cc5bd6a29fc3cf7aecac0cda5d0e1473c0f8997e
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC0-1.0"
] | 1
|
2021-11-05T07:04:00.000Z
|
2021-11-05T07:04:00.000Z
|
examples/python_tests/recsys2021/init.py
|
XinyaoWa/recdp
|
cc5bd6a29fc3cf7aecac0cda5d0e1473c0f8997e
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC0-1.0"
] | 2
|
2021-07-23T05:27:47.000Z
|
2021-07-30T06:50:15.000Z
|
examples/python_tests/recsys2021/init.py
|
XinyaoWa/recdp
|
cc5bd6a29fc3cf7aecac0cda5d0e1473c0f8997e
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC0-1.0"
] | 8
|
2021-07-17T08:38:53.000Z
|
2021-12-06T07:42:17.000Z
|
import os
import sys
import pathlib
# Resolve the directory four levels above this file and put it on sys.path
# so the package there can be imported.
_this_file = pathlib.Path(__file__)
import_path = str(_this_file.parent.parent.parent.parent.absolute())
print(import_path)
sys.path.append(import_path)
import findspark
findspark.init()
| 22.111111
| 80
| 0.819095
| 29
| 199
| 5.37931
| 0.448276
| 0.192308
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070352
| 199
| 8
| 81
| 24.875
| 0.843243
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.875
| 0
| 0.875
| 0.125
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e0cd345991451d14a0c20d9211135ea6524de003
| 433
|
py
|
Python
|
mixin/__init__.py
|
martmists-gh/pymixin
|
ef6aa6207182ea84c050325d4e1751ac8908598a
|
[
"MIT"
] | 3
|
2021-09-25T17:34:59.000Z
|
2021-11-12T12:01:19.000Z
|
mixin/__init__.py
|
martmists-gh/pymixin
|
ef6aa6207182ea84c050325d4e1751ac8908598a
|
[
"MIT"
] | null | null | null |
mixin/__init__.py
|
martmists-gh/pymixin
|
ef6aa6207182ea84c050325d4e1751ac8908598a
|
[
"MIT"
] | null | null | null |
from mixin.annotate import At, AtValue, UnsupportedInjectableError
from mixin.api import inject, overwrite, redirect, modify_const, modify_var
from mixin.callback import CallbackInfo, CancellationException
from mixin.util import unwrap
__all__ = ("inject", "overwrite", "redirect", "modify_const", "modify_var",
"At", "AtValue", "UnsupportedInjectableError", "CallbackInfo",
"CancellationException", "unwrap")
| 48.111111
| 75
| 0.757506
| 43
| 433
| 7.44186
| 0.465116
| 0.1125
| 0.21875
| 0.18125
| 0.26875
| 0.26875
| 0.26875
| 0
| 0
| 0
| 0
| 0
| 0.136259
| 433
| 8
| 76
| 54.125
| 0.855615
| 0
| 0
| 0
| 0
| 0
| 0.274827
| 0.108545
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.571429
| 0
| 0.571429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
e0ebb2271e07c44d3fc8d74f8bfaed2c5c6abb6f
| 122
|
py
|
Python
|
base/config/__init__.py
|
klen/Flask-Foundation
|
d154886a8a4358a3bfb99d189a6401e422fea416
|
[
"BSD-3-Clause"
] | 36
|
2015-01-03T07:57:07.000Z
|
2019-12-09T10:48:08.000Z
|
base/config/__init__.py
|
klen/Flask-Foundation
|
d154886a8a4358a3bfb99d189a6401e422fea416
|
[
"BSD-3-Clause"
] | null | null | null |
base/config/__init__.py
|
klen/Flask-Foundation
|
d154886a8a4358a3bfb99d189a6401e422fea416
|
[
"BSD-3-Clause"
] | 11
|
2015-02-07T01:03:23.000Z
|
2021-09-24T07:13:38.000Z
|
from os import path as op
# Project root: three directory levels above this file
# (base/config/__init__.py -> base/config -> base -> root).
_config_dir = op.dirname(__file__)
_base_dir = op.dirname(_config_dir)
ROOTDIR = op.abspath(op.dirname(_base_dir))
| 15.25
| 35
| 0.581967
| 16
| 122
| 4.1875
| 0.625
| 0.402985
| 0.328358
| 0.537313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.311475
| 122
| 7
| 36
| 17.428571
| 0.797619
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
e0f1f606bfb9a3f0668cc800b33d45096039969c
| 242
|
py
|
Python
|
layers/modules/__init__.py
|
pengfeidip/refindeDet_Pytorch
|
743a6ffdfc6efeef7bce5ce9fb4adb5211b7635f
|
[
"MIT"
] | 12
|
2019-06-23T13:22:48.000Z
|
2020-08-11T03:04:34.000Z
|
layers/modules/__init__.py
|
pengfeidip/refindeDet_Pytorch
|
743a6ffdfc6efeef7bce5ce9fb4adb5211b7635f
|
[
"MIT"
] | 4
|
2019-06-25T13:52:05.000Z
|
2019-08-27T23:09:09.000Z
|
layers/modules/__init__.py
|
pengfeidip/refindeDet_Pytorch
|
743a6ffdfc6efeef7bce5ce9fb4adb5211b7635f
|
[
"MIT"
] | null | null | null |
from .l2norm import L2Norm
from .multibox_loss import MultiBoxLoss
from .refinedet_multibox_loss import RefineDetMultiBoxLoss
from .redinedet_focal_loss import RefineDetFocalLoss
__all__ = ['L2Norm', 'MultiBoxLoss', 'RefineDetMultiBoxLoss']
| 34.571429
| 61
| 0.847107
| 25
| 242
| 7.84
| 0.48
| 0.153061
| 0.183673
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013636
| 0.090909
| 242
| 6
| 62
| 40.333333
| 0.877273
| 0
| 0
| 0
| 0
| 0
| 0.161157
| 0.086777
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1c9d3fb42bd875da4d567458ec450a89d679f209
| 3,514
|
py
|
Python
|
change_timestamp_from_logs.py
|
eLRuLL/docker-elk
|
cf1ceccdc0edf9472c51c54f3a51aaed041b91ff
|
[
"MIT"
] | null | null | null |
change_timestamp_from_logs.py
|
eLRuLL/docker-elk
|
cf1ceccdc0edf9472c51c54f3a51aaed041b91ff
|
[
"MIT"
] | null | null | null |
change_timestamp_from_logs.py
|
eLRuLL/docker-elk
|
cf1ceccdc0edf9472c51c54f3a51aaed041b91ff
|
[
"MIT"
] | null | null | null |
import re
from datetime import datetime
# Daily rotated log files from 2018-08-02 through 2018-11-28, plus the live
# (undated) 'ws.log'.  Generated instead of hand-listing all 120 entries;
# contents and order are identical to the original literal list.
_DATE_RANGES = (  # (month, first_day, last_day), all within 2018
    (8, 2, 31),
    (9, 1, 30),
    (10, 1, 31),
    (11, 1, 28),
)
files = [
    'ws.2018-{:02d}-{:02d}.log'.format(month, day)
    for month, first_day, last_day in _DATE_RANGES
    for day in range(first_day, last_day + 1)
]
files.append('ws.log')
# Rewrite each log's leading "[Day Mon DD HH:MM:SS YYYY]" timestamp to ISO
# "YYYY-MM-DD HH:MM:SS", writing the result next to the input as '_<name>'.
# The pattern is compiled once outside the loop; the raw string fixes the
# invalid escape sequence '\[' that the original non-raw literal produced.
timestamp_re = re.compile(r'^\[(.+?)\]')
for input_file_name in files:
    output_file_name = '_' + input_file_name
    with open(input_file_name, 'r') as f1, open(output_file_name, 'w') as f2:
        for line in f1:
            matched = timestamp_re.search(line)
            if matched:
                found_date = matched.group(1)
                new_date = datetime.strptime(found_date, '%a %b %d %H:%M:%S %Y').strftime('%Y-%m-%d %H:%M:%S')
                f2.write(line.replace(found_date, new_date))
            # NOTE(review): lines without a leading [timestamp] are silently
            # dropped from the output -- confirm this filtering is intentional.
| 25.838235
| 110
| 0.527604
| 674
| 3,514
| 2.727003
| 0.106825
| 0.388466
| 0.577802
| 0.185528
| 0.014146
| 0
| 0
| 0
| 0
| 0
| 0
| 0.345238
| 0.211155
| 3,514
| 135
| 111
| 26.02963
| 0.317821
| 0
| 0
| 0
| 0
| 0
| 0.591633
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015038
| 0
| 0.015038
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1ca6a51cfa4d6af82c151bcf9dacb117a82fcd65
| 185
|
py
|
Python
|
elementalcms/management/pagescommands/__init__.py
|
paranoid-software/elemental-cms
|
7f09f9cd5498577d23fa70d1a51497b9de232598
|
[
"MIT"
] | 3
|
2022-01-12T09:11:54.000Z
|
2022-02-24T22:39:11.000Z
|
elementalcms/management/pagescommands/__init__.py
|
paranoid-software/elemental-cms
|
7f09f9cd5498577d23fa70d1a51497b9de232598
|
[
"MIT"
] | null | null | null |
elementalcms/management/pagescommands/__init__.py
|
paranoid-software/elemental-cms
|
7f09f9cd5498577d23fa70d1a51497b9de232598
|
[
"MIT"
] | 1
|
2022-01-12T09:11:56.000Z
|
2022-01-12T09:11:56.000Z
|
from .list import List
from .create import Create
from .remove import Remove
from .push import Push
from .pull import Pull
from .publish import Publish
from .unpublish import Unpublish
| 23.125
| 32
| 0.810811
| 28
| 185
| 5.357143
| 0.321429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151351
| 185
| 7
| 33
| 26.428571
| 0.955414
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1cf35c6ad603a5bdfd47612bf85694e7ccf5e1bf
| 94
|
py
|
Python
|
tailorscad/builder/coffeescad.py
|
savorywatt/tailorSCAD
|
dc0dd76da780a635fc2694bc7396aaf418dcca12
|
[
"MIT"
] | null | null | null |
tailorscad/builder/coffeescad.py
|
savorywatt/tailorSCAD
|
dc0dd76da780a635fc2694bc7396aaf418dcca12
|
[
"MIT"
] | null | null | null |
tailorscad/builder/coffeescad.py
|
savorywatt/tailorSCAD
|
dc0dd76da780a635fc2694bc7396aaf418dcca12
|
[
"MIT"
] | null | null | null |
# TODO Wrap coffeescad command line to build it
def build_with_coffeescad(state):
    """Build *state* via the coffeescad command line (not implemented).

    Currently a no-op placeholder that returns None; the TODO above tracks
    wrapping the coffeescad CLI here.
    """
| 13.428571
| 47
| 0.755319
| 14
| 94
| 4.928571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202128
| 94
| 6
| 48
| 15.666667
| 0.92
| 0.478723
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 1
| 0.5
| false
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
e820327fd5165c72db4ed503b804ea92562ca7c5
| 5,300
|
py
|
Python
|
mava/systems/tf/mad4pg/execution.py
|
sash-a/Mava
|
976d0863e058fd92f066d8a8fabe2f5e2f3f60ce
|
[
"Apache-2.0"
] | null | null | null |
mava/systems/tf/mad4pg/execution.py
|
sash-a/Mava
|
976d0863e058fd92f066d8a8fabe2f5e2f3f60ce
|
[
"Apache-2.0"
] | null | null | null |
mava/systems/tf/mad4pg/execution.py
|
sash-a/Mava
|
976d0863e058fd92f066d8a8fabe2f5e2f3f60ce
|
[
"Apache-2.0"
] | null | null | null |
# python3
# Copyright 2021 InstaDeep Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""MAD4PG system executor implementation."""
from typing import Any, Dict, List, Optional
import sonnet as snt
from acme.specs import EnvironmentSpec
from acme.tf import variable_utils as tf2_variable_utils
from mava import adders
from mava.systems.tf.maddpg.execution import (
MADDPGFeedForwardExecutor,
MADDPGRecurrentExecutor,
)
class MAD4PGFeedForwardExecutor(MADDPGFeedForwardExecutor):
    """A feed-forward executor for MAD4PG.

    Each agent in the system acts according to a feed-forward policy.  All
    behaviour is inherited from the MADDPG feed-forward executor; this
    subclass only forwards its constructor arguments.
    """

    def __init__(
        self,
        policy_networks: Dict[str, snt.Module],
        agent_specs: Dict[str, EnvironmentSpec],
        agent_net_keys: Dict[str, str],
        network_sampling_setup: List,
        net_keys_to_ids: Dict[str, int],
        evaluator: bool = False,
        adder: Optional[adders.ReverbParallelAdder] = None,
        counts: Optional[Dict[str, Any]] = None,
        variable_client: Optional[tf2_variable_utils.VariableClient] = None,
        interval: Optional[dict] = None,
    ):
        """Initialise the system executor.

        Args:
            policy_networks: policy network for each agent in the system.
            agent_specs: per-agent observation and action space specifications.
            agent_net_keys: specifies what network each agent uses.
            network_sampling_setup: networks randomly sampled from by the
                executors at the start of an environment run.
            net_keys_to_ids: mapping from network keys to their integer ids.
            evaluator: whether the executor will be used for evaluation.
            adder: adder which sends data to a replay buffer. Defaults to None.
            counts: count values used to record executor episodes and steps.
            variable_client: client to copy weights from the trainer.
                Defaults to None.
            interval: interval that evaluations are run at.
        """
        super().__init__(
            policy_networks=policy_networks,
            agent_specs=agent_specs,
            agent_net_keys=agent_net_keys,
            network_sampling_setup=network_sampling_setup,
            net_keys_to_ids=net_keys_to_ids,
            evaluator=evaluator,
            adder=adder,
            counts=counts,
            variable_client=variable_client,
            interval=interval,
        )
class MAD4PGRecurrentExecutor(MADDPGRecurrentExecutor):
    """A recurrent executor for MAD4PG.
    An executor based on a recurrent policy for each agent in the system.
    """
    def __init__(
        self,
        policy_networks: Dict[str, snt.Module],
        agent_specs: Dict[str, EnvironmentSpec],
        agent_net_keys: Dict[str, str],
        network_sampling_setup: List,
        net_keys_to_ids: Dict[str, int],
        evaluator: bool = False,
        adder: Optional[adders.ReverbParallelAdder] = None,
        counts: Optional[Dict[str, Any]] = None,
        variable_client: Optional[tf2_variable_utils.VariableClient] = None,
        interval: Optional[dict] = None,
    ):
        """Initialise the system executor
        Args:
            policy_networks: policy networks for each agent in
                the system.
            agent_specs: agent observation and action
                space specifications.
            agent_net_keys: specifies what network each agent uses.
            network_sampling_setup: List of networks that are randomly
                sampled from by the executors at the start of an environment run.
            net_keys_to_ids: Specifies a mapping from network keys to their integer id.
            adder: adder which sends data
                to a replay buffer. Defaults to None.
            counts: Count values used to record executor episode and steps.
            variable_client:
                client to copy weights from the trainer. Defaults to None.
            evaluator: whether the executor will be used for
                evaluation.
            interval: interval that evaluations are run at.
        """
        # Docstring fix: a `store_recurrent_state` argument was documented
        # here but is not part of this signature; it has been removed from
        # the Args list above.
        super().__init__(
            policy_networks=policy_networks,
            agent_specs=agent_specs,
            adder=adder,
            variable_client=variable_client,
            counts=counts,
            agent_net_keys=agent_net_keys,
            network_sampling_setup=network_sampling_setup,
            net_keys_to_ids=net_keys_to_ids,
            evaluator=evaluator,
            interval=interval,
        )
| 38.970588
| 87
| 0.653962
| 627
| 5,300
| 5.360447
| 0.275917
| 0.033323
| 0.028563
| 0.028563
| 0.713478
| 0.713478
| 0.713478
| 0.713478
| 0.692651
| 0.692651
| 0
| 0.004508
| 0.288491
| 5,300
| 135
| 88
| 39.259259
| 0.886767
| 0.488302
| 0
| 0.786885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032787
| false
| 0
| 0.098361
| 0
| 0.163934
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
08fc2c47174c7e7c6bd690009186737b4540bafa
| 146
|
py
|
Python
|
seafileapi/admin.py
|
ilyaglow/python-seafile
|
93d220204be0ab191a321c1bddbef5f6ec043762
|
[
"Apache-2.0"
] | 31
|
2015-05-09T21:16:50.000Z
|
2022-01-16T04:05:48.000Z
|
seafileapi/admin.py
|
ilyaglow/python-seafile
|
93d220204be0ab191a321c1bddbef5f6ec043762
|
[
"Apache-2.0"
] | 10
|
2015-06-02T02:26:24.000Z
|
2021-11-21T15:47:41.000Z
|
seafileapi/admin.py
|
ilyaglow/python-seafile
|
93d220204be0ab191a321c1bddbef5f6ec043762
|
[
"Apache-2.0"
] | 37
|
2015-05-10T09:39:34.000Z
|
2022-02-22T19:51:45.000Z
|
class SeafileAdmin(object):
    """Administrative operations against a Seafile server (stub methods only)."""

    def lists_users(self, maxcount=100):
        """Intended to list up to *maxcount* users; not implemented (no-op).

        NOTE(review): the name looks like a typo for ``list_users`` -- kept
        unchanged for backward compatibility with existing callers.
        """

    def list_user_repos(self, username):
        """Intended to list repositories of *username*; not implemented (no-op)."""
| 18.25
| 40
| 0.616438
| 17
| 146
| 5.117647
| 0.823529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.30137
| 146
| 7
| 41
| 20.857143
| 0.823529
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.4
| 0
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
08fd191092e30d5edf720aadf20d28af1feef528
| 840
|
py
|
Python
|
pirates/leveleditor/worldData/CubaWorld.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 81
|
2018-04-08T18:14:24.000Z
|
2022-01-11T07:22:15.000Z
|
pirates/leveleditor/worldData/CubaWorld.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 4
|
2018-09-13T20:41:22.000Z
|
2022-01-08T06:57:00.000Z
|
pirates/leveleditor/worldData/CubaWorld.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 26
|
2018-05-26T12:49:27.000Z
|
2021-09-11T09:11:59.000Z
|
from pandac.PandaModules import Point3, VBase3
# World layout for Cuba: one default region containing the Cuba island with a
# single LOD sphere; ObjectIds maps each object id to its path in the tree.
objectStruct = {
    'Objects': {
        '1160614503.81sdnaik': {
            'Type': 'Region',
            'Name': 'default',
            'Objects': {
                '1160614528.73sdnaik': {
                    'Type': 'Island',
                    'File': 'CubaIsland',
                    'Hpr': Point3(0.0, 0.0, 0.0),
                    'Objects': {
                        '1163130882.92sdnaik': {
                            'Type': 'LOD Sphere',
                            'Hpr': Point3(0.0, 0.0, 0.0),
                            'Pos': Point3(154.735, 0.0, 121.512),
                            'Radi': [3594, 4594, 5594],
                            'Scale': VBase3(1.0, 1.0, 1.0)}},
                    'Pos': Point3(-43.873, 98.968, 0.0),
                    'Visual': {
                        'Model': 'models/islands/pir_m_are_isl_cuba'}}},
            'Visual': {}}},
    'Layers': {},
    'ObjectIds': {
        '1160614503.81sdnaik': '["Objects"]["1160614503.81sdnaik"]',
        '1160614528.73sdnaik': '["Objects"]["1160614503.81sdnaik"]["Objects"]["1160614528.73sdnaik"]',
        '1163130882.92sdnaik': '["Objects"]["1160614503.81sdnaik"]["Objects"]["1160614528.73sdnaik"]["Objects"]["1163130882.92sdnaik"]'}}
| 420
| 793
| 0.665476
| 107
| 840
| 5.186916
| 0.476636
| 0.043243
| 0.043243
| 0.043243
| 0.234234
| 0.234234
| 0.054054
| 0.054054
| 0
| 0
| 0
| 0.260377
| 0.053571
| 840
| 2
| 793
| 420
| 0.437736
| 0
| 0
| 0
| 0
| 0
| 0.575505
| 0.281807
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1c1990b20d3b7cc6ddfdb1498aa7c0fd35f29082
| 48
|
py
|
Python
|
moya/elements/__init__.py
|
moyaproject/moya
|
78b91d87b4519f91dfdd2b40dab44e72f201a843
|
[
"MIT"
] | 129
|
2015-02-16T12:02:50.000Z
|
2021-11-06T00:20:01.000Z
|
moya/elements/__init__.py
|
liaohandel/moya
|
78b91d87b4519f91dfdd2b40dab44e72f201a843
|
[
"MIT"
] | 5
|
2015-02-19T15:56:41.000Z
|
2015-09-08T18:58:35.000Z
|
moya/elements/__init__.py
|
liaohandel/moya
|
78b91d87b4519f91dfdd2b40dab44e72f201a843
|
[
"MIT"
] | 14
|
2015-02-19T17:20:34.000Z
|
2022-03-28T01:38:09.000Z
|
from .elementbase import ElementBase, Attribute
| 24
| 47
| 0.854167
| 5
| 48
| 8.2
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104167
| 48
| 1
| 48
| 48
| 0.953488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
1c2f6d943cfc6ffe509c5db07d44559569799c6f
| 71
|
py
|
Python
|
PycharmProjects/PythonExercicios/ex001.py
|
RodrigoMASRamos/Projects.py
|
ed15981b320914c9667305dcd5fb5b7906fd9b00
|
[
"MIT"
] | null | null | null |
PycharmProjects/PythonExercicios/ex001.py
|
RodrigoMASRamos/Projects.py
|
ed15981b320914c9667305dcd5fb5b7906fd9b00
|
[
"MIT"
] | null | null | null |
PycharmProjects/PythonExercicios/ex001.py
|
RodrigoMASRamos/Projects.py
|
ed15981b320914c9667305dcd5fb5b7906fd9b00
|
[
"MIT"
] | null | null | null |
# Write "Olá,Mundo!" ("Hello, World!") to the screen.
print('Olá,mundo!')
| 35.5
| 51
| 0.71831
| 12
| 71
| 4.25
| 0.833333
| 0.313725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 71
| 2
| 52
| 35.5
| 0.809524
| 0.704225
| 0
| 0
| 0
| 0
| 0.47619
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
1c455ea2754f11157926ef47242fd8393fbd2d15
| 9,560
|
py
|
Python
|
mlfromscratch/supervised_learning/regression.py
|
sourcepirate/ML-From-Scratch
|
c6839bf47c360d6fa48861302fd90ccd4a8c38db
|
[
"MIT"
] | null | null | null |
mlfromscratch/supervised_learning/regression.py
|
sourcepirate/ML-From-Scratch
|
c6839bf47c360d6fa48861302fd90ccd4a8c38db
|
[
"MIT"
] | null | null | null |
mlfromscratch/supervised_learning/regression.py
|
sourcepirate/ML-From-Scratch
|
c6839bf47c360d6fa48861302fd90ccd4a8c38db
|
[
"MIT"
] | 2
|
2017-10-03T07:45:16.000Z
|
2018-12-21T01:31:21.000Z
|
from __future__ import print_function, division
import numpy as np
import math
from mlfromscratch.utils import normalize, polynomial_features
class Regression(object):
    """ Base regression model. Models the relationship between a scalar dependent variable y and the independent
    variables X.
    Parameters:
    -----------
    reg_factor: float
        The factor that will determine the amount of regularization and feature
        shrinkage.
    n_iterations: float
        The number of training iterations the algorithm will tune the weights for.
    learning_rate: float
        The step length that will be used when updating the weights.
    gradient_descent: boolean
        True or false depending if gradient descent should be used when training. If
        false then we use batch optimization by least squares.
    """
    def __init__(self, reg_factor, n_iterations, learning_rate, gradient_descent):
        self.w = None
        self.n_iterations = n_iterations
        self.learning_rate = learning_rate
        self.gradient_descent = gradient_descent
        self.reg_factor = reg_factor
        # Always present so callers may inspect it even when fit() took the
        # closed-form least-squares path (previously raised AttributeError).
        self.training_errors = []

    def initialize_weights(self, n_features):
        """ Initialize weights randomly [-1/N, 1/N] """
        limit = 1 / math.sqrt(n_features)
        self.w = np.random.uniform(-limit, limit, (n_features, ))

    def regularization(self):
        # No regularization by default
        return 0

    def regularization_gradient(self):
        # No regularization by default
        return 0

    def fit(self, X, y):
        """Train the model on X (n_samples x n_features) and targets y."""
        # Insert constant ones as first column (for bias weights)
        X = np.insert(X, 0, 1, axis=1)
        n_features = np.shape(X)[1]
        # Get weights by gradient descent opt.
        if self.gradient_descent:
            self.training_errors = []
            self.initialize_weights(n_features)
            # Do gradient descent for n_iterations
            for _ in range(self.n_iterations):
                y_pred = X.dot(self.w)
                # Calculate mean squared error
                mse = np.mean(0.5 * (y - y_pred)**2 + self.regularization())
                self.training_errors.append(mse)
                # Gradient of l2 loss w.r.t w
                grad_w = - (y - y_pred).dot(X) + self.regularization_gradient()
                # Update the weights
                self.w -= self.learning_rate * grad_w
        # Get weights by least squares (using Moore-Penrose pseudoinverse)
        else:
            # np.linalg.svd returns U, S, Vh with A = U @ diag(S) @ Vh, so the
            # pseudoinverse is Vh.T @ pinv(S) @ U.T.  The original used Vh in
            # place of Vh.T, which is only correct when Vh happens to be
            # symmetric -- fixed here.
            U, S, V = np.linalg.svd(X.T.dot(X) + self.reg_factor * np.identity(n_features))
            S = np.diag(S)
            X_sq_reg_inv = V.T.dot(np.linalg.pinv(S)).dot(U.T)
            self.w = X_sq_reg_inv.dot(X.T).dot(y)

    def predict(self, X):
        """Return predictions X @ w (bias column inserted automatically)."""
        # Insert constant ones for bias weights
        X = np.insert(X, 0, 1, axis=1)
        y_pred = X.dot(self.w)
        return y_pred
class LinearRegression(Regression):
    """Ordinary (unregularized) linear model.
    Parameters:
    -----------
    n_iterations: float
        The number of training iterations used to tune the weights.
    learning_rate: float
        The step length used when updating the weights.
    gradient_descent: boolean
        If True, train by gradient descent; otherwise solve the batch
        least-squares problem directly.
    """
    def __init__(self, n_iterations=100, learning_rate=0.001, gradient_descent=True):
        # Plain linear regression is the special case with no regularization.
        super(LinearRegression, self).__init__(
            reg_factor=0,
            n_iterations=n_iterations,
            learning_rate=learning_rate,
            gradient_descent=gradient_descent)
class PolynomialRegression(Regression):
    """Regression on polynomially expanded features.
    The data is transformed with polynomial_features before fitting and
    predicting, which allows non-linear regression.
    Parameters:
    -----------
    degree: int
        The power of the polynomial that the independent variable X will be transformed to.
    n_iterations: float
        The number of training iterations used to tune the weights.
    learning_rate: float
        The step length used when updating the weights.
    gradient_descent: boolean
        If True, train by gradient descent; otherwise solve the batch
        least-squares problem directly.
    """
    def __init__(self, degree, n_iterations=3000, learning_rate=0.001, gradient_descent=True):
        self.degree = degree
        super(PolynomialRegression, self).__init__(
            reg_factor=0,
            n_iterations=n_iterations,
            learning_rate=learning_rate,
            gradient_descent=gradient_descent)

    def fit(self, X, y):
        # Expand features to the configured degree, then delegate.
        expanded = polynomial_features(X, degree=self.degree)
        super(PolynomialRegression, self).fit(expanded, y)

    def predict(self, X):
        expanded = polynomial_features(X, degree=self.degree)
        return super(PolynomialRegression, self).predict(expanded)
class RidgeRegression(Regression):
    """Linear regression with an l2 penalty (Tikhonov regularization).
    Balances the fit to the training data against model complexity; a larger
    regularization factor decreases the variance of the model.
    Parameters:
    -----------
    reg_factor: float
        The factor that will determine the amount of regularization and feature
        shrinkage.
    n_iterations: float
        The number of training iterations used to tune the weights.
    learning_rate: float
        The step length used when updating the weights.
    gradient_descent: boolean
        If True, train by gradient descent; otherwise solve the batch
        least-squares problem directly.
    """
    def __init__(self, reg_factor, n_iterations=1000, learning_rate=0.001, gradient_descent=True):
        super(RidgeRegression, self).__init__(
            reg_factor, n_iterations, learning_rate, gradient_descent)

    def regularization(self):
        # l2 penalty: reg_factor * w . w
        weights = self.w
        return self.reg_factor * weights.T.dot(weights)

    def regularization_gradient(self):
        # Gradient of the penalty term with respect to w.
        return self.reg_factor * self.w
class LassoRegression(Regression):
    """Linear regression model with a regularization factor which does both variable selection
    and regularization. Model that tries to balance the fit of the model with respect to the training
    data and the complexity of the model. A large regularization factor with decreases the variance of
    the model and do para.
    Parameters:
    -----------
    degree: int
        The power of the polynomial that the independent variable X will be transformed to.
    reg_factor: float
        The factor that will determine the amount of regularization and feature
        shrinkage.
    n_iterations: float
        The number of training iterations the algorithm will tune the weights for.
    learning_rate: float
        The step length that will be used when updating the weights.
    gradient_descent: boolean
        True or false depending if gradient descent should be used when training. If
        false then we use batch optimization by least squares.
    """
    def __init__(self, degree, reg_factor, n_iterations=3000, learning_rate=0.01, gradient_descent=True):
        self.degree = degree
        super(LassoRegression, self).__init__(reg_factor, n_iterations, learning_rate, gradient_descent)

    def fit(self, X, y):
        X_transformed = normalize(polynomial_features(X, degree=self.degree))
        super(LassoRegression, self).fit(X_transformed, y)

    def predict(self, X):
        X_transformed = normalize(polynomial_features(X, degree=self.degree))
        return super(LassoRegression, self).predict(X_transformed)

    def regularization(self):
        # l1 penalty: reg_factor * sum(|w|).  The original returned
        # reg_factor * len(self.w) -- a constant independent of the weight
        # values -- which is inconsistent with the np.sign gradient below.
        return self.reg_factor * np.sum(np.abs(self.w))

    def regularization_gradient(self):
        # Subgradient of the l1 norm.
        return self.reg_factor * np.sign(self.w)
class PolynomialRidgeRegression(Regression):
    """Ridge regression applied to a polynomial expansion of the input.

    Identical to ordinary ridge regression except that X is first mapped to
    normalized polynomial features, allowing polynomial fits.

    Parameters:
    -----------
    degree: int
        The power of the polynomial that the independent variable X will be transformed to.
    reg_factor: float
        The factor that will determine the amount of regularization and feature
        shrinkage.
    n_iterations: float
        The number of training iterations the algorithm will tune the weights for.
    learning_rate: float
        The step length that will be used when updating the weights.
    gradient_descent: boolean
        True or false depending if gradient descent should be used when training. If
        false then we use batch optimization by least squares.
    """
    def __init__(self, degree, reg_factor, n_iterations=3000, learning_rate=0.01, gradient_descent=True):
        self.degree = degree
        super(PolynomialRidgeRegression, self).__init__(reg_factor, n_iterations, learning_rate, gradient_descent)

    def _transform(self, X):
        # Shared preprocessing for fit() and predict(): polynomial expansion
        # followed by normalization.
        return normalize(polynomial_features(X, degree=self.degree))

    def fit(self, X, y):
        super(PolynomialRidgeRegression, self).fit(self._transform(X), y)

    def predict(self, X):
        return super(PolynomialRidgeRegression, self).predict(self._transform(X))

    def regularization(self):
        # l2 penalty: reg_factor * w.T w
        squared_norm = self.w.T.dot(self.w)
        return self.reg_factor * squared_norm

    def regularization_gradient(self):
        # Gradient of the l2 penalty.
        return self.reg_factor * self.w
| 43.853211
| 114
| 0.688808
| 1,249
| 9,560
| 5.123299
| 0.152922
| 0.070323
| 0.018753
| 0.021878
| 0.710736
| 0.706829
| 0.700109
| 0.676199
| 0.63463
| 0.63088
| 0
| 0.007566
| 0.239644
| 9,560
| 217
| 115
| 44.0553
| 0.872747
| 0.458787
| 0
| 0.438202
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.258427
| false
| 0
| 0.044944
| 0.089888
| 0.505618
| 0.011236
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
1c6af58062814e8e1f7ca3d01243af21515d0eb4
| 52
|
py
|
Python
|
bluesky_widgets/utils/event.py
|
danielballan/bluesky-widgets
|
cab633b03b71229b1ca27659d6fc81de914f2a72
|
[
"BSD-3-Clause"
] | 4
|
2020-07-16T20:51:10.000Z
|
2021-08-04T20:21:50.000Z
|
bluesky_widgets/utils/event.py
|
danielballan/stream-widgets
|
8f5e8cec0cd08f6f3e6e17a3467818c85e643f56
|
[
"BSD-3-Clause"
] | 124
|
2020-07-17T12:21:13.000Z
|
2022-03-30T13:44:31.000Z
|
bluesky_widgets/utils/event.py
|
danielballan/stream-widgets
|
8f5e8cec0cd08f6f3e6e17a3467818c85e643f56
|
[
"BSD-3-Clause"
] | 18
|
2020-07-16T20:36:45.000Z
|
2021-09-12T17:33:43.000Z
|
from bluesky_live.event import * # noqa: F401,F403
| 26
| 51
| 0.75
| 8
| 52
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 0.153846
| 52
| 1
| 52
| 52
| 0.727273
| 0.288462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1c8be3d7397ebbb315f532c7fc104dec64b867a1
| 993
|
py
|
Python
|
src/stackoverflow/61333922/test_myclass.py
|
mrdulin/python-codelab
|
3d960a14a96b3a673b7dc2277d202069b1f8e778
|
[
"MIT"
] | null | null | null |
src/stackoverflow/61333922/test_myclass.py
|
mrdulin/python-codelab
|
3d960a14a96b3a673b7dc2277d202069b1f8e778
|
[
"MIT"
] | null | null | null |
src/stackoverflow/61333922/test_myclass.py
|
mrdulin/python-codelab
|
3d960a14a96b3a673b7dc2277d202069b1f8e778
|
[
"MIT"
] | 3
|
2020-02-19T08:02:04.000Z
|
2021-06-08T13:27:51.000Z
|
import unittest
from unittest import mock
import serial
import myclass
class TestMyClass(unittest.TestCase):
    """Tests for MyClass.do_something with the serial port mocked out.

    The two original test methods were byte-for-byte duplicates except for the
    mocked write() return value and the expected result, so the shared driver
    logic is factored into _run_do_something.
    """

    def _run_do_something(self, mock_Serial, write_return, expected):
        # Drive MyClass.do_something against a stubbed serial.Serial whose
        # write() reports `write_return` bytes written, and assert the result.
        serial_instance = mock_Serial()
        serial_instance.write.return_value = write_return
        dummy_data = b'123'
        c = myclass.MyClass('COM8', 115200)
        got = c.do_something(dummy_data)
        self.assertEqual(got, expected)
        serial_instance.write.assert_called_once_with(b'123')

    @mock.patch('myclass.serial.Serial')
    def test_do_something_fails_on_bad_write(self, mock_Serial):
        # A short write (1 of 3 bytes) must be reported as failure (-1).
        self._run_do_something(mock_Serial, write_return=1, expected=-1)

    @mock.patch('myclass.serial.Serial')
    def test_do_something_success(self, mock_Serial):
        # A complete write (all 3 bytes) must be reported as success (0).
        self._run_do_something(mock_Serial, write_return=3, expected=0)
# Allow running this test module directly: `python test_myclass.py`.
if __name__ == '__main__':
    unittest.main()
| 30.090909
| 64
| 0.676737
| 128
| 993
| 4.929688
| 0.335938
| 0.114105
| 0.101426
| 0.152139
| 0.763867
| 0.763867
| 0.763867
| 0.763867
| 0.763867
| 0.618067
| 0
| 0.038911
| 0.223565
| 993
| 32
| 65
| 31.03125
| 0.779507
| 0
| 0
| 0.518519
| 0
| 0
| 0.070493
| 0.042296
| 0
| 0
| 0
| 0
| 0.148148
| 1
| 0.074074
| false
| 0
| 0.148148
| 0
| 0.259259
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
1c910f205bbd6603e3182b0b28117d36007747ef
| 70
|
py
|
Python
|
canbeta/child/model/__init__.py
|
FROM-THE-EARTH/canbeta
|
d41cf1b3dd926c5144dc5086f42943a0594c478b
|
[
"MIT"
] | 1
|
2021-06-27T10:50:37.000Z
|
2021-06-27T10:50:37.000Z
|
canbeta/child/model/__init__.py
|
FROM-THE-EARTH/canbeta
|
d41cf1b3dd926c5144dc5086f42943a0594c478b
|
[
"MIT"
] | 5
|
2021-06-19T08:33:47.000Z
|
2021-06-19T08:59:30.000Z
|
canbeta/child/model/__init__.py
|
FROM-THE-EARTH/canbeta
|
d41cf1b3dd926c5144dc5086f42943a0594c478b
|
[
"MIT"
] | null | null | null |
from can09.child.model.child_logging_model import ChildLoggingModel
| 17.5
| 67
| 0.871429
| 9
| 70
| 6.555556
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.085714
| 70
| 3
| 68
| 23.333333
| 0.890625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
1c996484af2e4531291c90bf60187267b05b60c0
| 151,175
|
py
|
Python
|
openmdao/core/tests/test_group.py
|
Dakror/OpenMDAO
|
3650622e0e96bed6979991bd096186c85050738f
|
[
"Apache-2.0"
] | 2
|
2015-11-28T20:25:59.000Z
|
2017-02-14T09:14:19.000Z
|
openmdao/core/tests/test_group.py
|
Dakror/OpenMDAO
|
3650622e0e96bed6979991bd096186c85050738f
|
[
"Apache-2.0"
] | 1
|
2020-07-20T16:19:35.000Z
|
2020-07-20T16:19:35.000Z
|
openmdao/core/tests/test_group.py
|
Dakror/OpenMDAO
|
3650622e0e96bed6979991bd096186c85050738f
|
[
"Apache-2.0"
] | 1
|
2015-11-29T22:09:58.000Z
|
2015-11-29T22:09:58.000Z
|
"""
Unit tests for Group.
"""
import itertools
import unittest
import warnings
import numpy as np
try:
from parameterized import parameterized
except ImportError:
from openmdao.utils.assert_utils import SkipParameterized as parameterized
import openmdao.api as om
from openmdao.test_suite.components.sellar import SellarDis2
from openmdao.utils.mpi import MPI, multi_proc_exception_check
from openmdao.utils.assert_utils import assert_near_equal, assert_warning
from openmdao.utils.logger_utils import TestLogger
from openmdao.utils.general_utils import ignore_errors_context, reset_warning_registry
from openmdao.utils.name_maps import name2abs_names
try:
from openmdao.vectors.petsc_vector import PETScVector
except ImportError:
PETScVector = None
# Fixture arrays shared by the tests below.
arr_order_1x1 = np.array([1, 2, 3, 4])  # flat 4-element vector
arr_2x4 = np.array([[0, 1, 2, 3], [10, 11, 12, 13]])  # entry = 10*row + col
arr_order_3x3 = np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]])  # three identical rows 1..3
arr_order_4x4 = np.array([[1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4]])  # four identical rows 1..4
arr_large_4x4 = np.array([[0, 1, 2, 3], [10, 11, 12, 13], [20, 21, 22, 23], [30, 31, 32, 33]])  # entry = 10*row + col
class SimpleGroup(om.Group):
    """Minimal group: an IndepVarComp output feeding a single ExecComp input."""

    def setup(self):
        # Declare both components, then wire comp1's output into comp2's input.
        for sys_name, comp in (('comp1', om.IndepVarComp('x', 5.0)),
                               ('comp2', om.ExecComp('b=2*a'))):
            self.add_subsystem(sys_name, comp)
        self.connect('comp1.x', 'comp2.a')
class BranchGroup(om.Group):
    """Two nested branches: Branch1/G1/G2/comp1 and Branch2/G3/comp2."""

    def setup(self):
        # add_subsystem returns the subsystem it adds, so each nested branch
        # can be built by chaining the calls instead of naming intermediates.
        self.add_subsystem('Branch1', om.Group()) \
            .add_subsystem('G1', om.Group()) \
            .add_subsystem('G2', om.Group()) \
            .add_subsystem('comp1', om.ExecComp('b=2.0*a', a=3.0, b=6.0))
        self.add_subsystem('Branch2', om.Group()) \
            .add_subsystem('G3', om.Group()) \
            .add_subsystem('comp2', om.ExecComp('b=3.0*a', a=4.0, b=12.0))
class SetOrderGroup(om.Group):
    """Three identical doubling components whose execution order is overridden
    with set_order and wired as a C1 -> C3 -> C2 chain."""

    def setup(self):
        # All three components share the same expression; add them in
        # declaration order C1, C2, C3, then override the execution order.
        for comp_name in ('C1', 'C2', 'C3'):
            self.add_subsystem(comp_name, om.ExecComp('y=2.0*x'))
        self.set_order(['C1', 'C3', 'C2'])
        self.connect('C1.y', 'C3.x')
        self.connect('C3.y', 'C2.x')
class ReportOrderComp(om.ExplicitComponent):
    """Component that appends its pathname to a caller-owned list every time
    compute() runs, so tests can verify execution order."""

    def __init__(self, order_list):
        super().__init__()
        # Caller-owned list that collects pathnames in execution order.
        self._order_list = order_list

    def setup(self):
        self.add_input('x', 0.0)
        self.add_output('y', 0.0)

    def compute(self, inputs, outputs):
        # Record that this component just executed. Note: 'y' is never
        # written, so it keeps its default value.
        self._order_list.append(self.pathname)
class TestSubsystemConfigError(unittest.TestCase):

    def test_add_subsystem_error_on_config(self):
        """Calling add_subsystem from configure() must raise a RuntimeError."""

        class SimpleGroup(om.Group):
            def setup(self):
                self.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
                self.add_subsystem('comp2', om.ExecComp('b=2*a'))
                self.connect('comp1.x', 'comp2.a')

            def configure(self):
                # Illegal: subsystems may only be added during setup().
                self.add_subsystem('comp3', om.IndepVarComp('y', 10.0))

        prob = om.Problem(model=SimpleGroup())
        with self.assertRaises(RuntimeError) as cm:
            prob.setup()
        self.assertEqual(str(cm.exception),
                         "<model> <class SimpleGroup>: Cannot call add_subsystem in the configure method")
class SlicerComp(om.ExplicitComponent):
    """Takes a length-4 input vector and outputs the square of its sum."""

    def setup(self):
        self.add_input('x', np.ones(4))
        self.add_output('y', 1.0)

    def compute(self, inputs, outputs):
        total = np.sum(inputs['x'])
        outputs['y'] = total ** 2.0
class TestGroup(unittest.TestCase):
def test_add_subsystem_class(self):
p = om.Problem()
try:
p.model.add_subsystem('comp', om.IndepVarComp)
except TypeError as err:
self.assertEqual(str(err), "Group: Subsystem 'comp' should be an instance, "
"but a IndepVarComp class object was found.")
else:
self.fail('Exception expected.')
def test_same_sys_name(self):
"""Test error checking for the case where we add two subsystems with the same name."""
p = om.Problem()
p.model.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
p.model.add_subsystem('comp2', om.ExecComp('b=2*a'))
try:
p.model.add_subsystem('comp2', om.ExecComp('b=2*a'))
except Exception as err:
self.assertEqual(str(err), "Group: Subsystem name 'comp2' is already used.")
else:
self.fail('Exception expected.')
def test_inner_connect_w_extern_promote(self):
p = om.Problem()
g = p.model.add_subsystem('g', om.Group(), promotes_inputs=['c0.x'])
g.add_subsystem('ivc', om.IndepVarComp('x', 2.))
g.add_subsystem('c0', om.ExecComp('y = 2*x'))
g.connect('ivc.x', 'c0.x')
p.setup()
p.final_setup()
from openmdao.error_checking.check_config import _get_promoted_connected_ins
ins = _get_promoted_connected_ins(p.model)
self.assertEqual(len(ins), 1)
inp, tup = list(ins.items())[0]
in_proms, mans = tup
self.assertEqual(inp, 'g.c0.x')
self.assertEqual(in_proms, ['g'])
self.assertEqual(mans, [('c0.x', 'g')])
def test_inner_connect_w_2extern_promotes(self):
p = om.Problem()
g0 = p.model.add_subsystem('g0', om.Group(), promotes_inputs=['c0.x'])
g = g0.add_subsystem('g', om.Group(), promotes_inputs=['c0.x'])
g.add_subsystem('ivc', om.IndepVarComp('x', 2.))
g.add_subsystem('c0', om.ExecComp('y = 2*x'))
g.connect('ivc.x', 'c0.x')
p.setup()
p.final_setup()
from openmdao.error_checking.check_config import _get_promoted_connected_ins
ins = _get_promoted_connected_ins(p.model)
self.assertEqual(len(ins), 1)
inp, tup = list(ins.items())[0]
in_proms, mans = tup
self.assertEqual(inp, 'g0.g.c0.x')
self.assertEqual(list(sorted(in_proms)), ['g0', 'g0.g'])
self.assertEqual(mans, [('c0.x', 'g0.g')])
def test_double_promote_conns(self):
p = om.Problem()
gouter = p.model.add_subsystem('gouter', om.Group())
gouter.add_subsystem('couter', om.ExecComp('xx = a * 3.'), promotes_outputs=['xx'])
g = gouter.add_subsystem('g', om.Group(), promotes_inputs=[('x', 'xx')])
g.add_subsystem('ivc', om.IndepVarComp('x', 2.), promotes_outputs=['x'])
g.add_subsystem('c0', om.ExecComp('y = 2*x'), promotes_inputs=['x'])
expected = "'gouter' <class Group>: The following inputs have multiple connections: " \
"gouter.g.c0.x from ['gouter.couter.xx', 'gouter.g.ivc.x']"
with self.assertRaises(RuntimeError) as cm:
p.setup()
self.assertEqual(str(cm.exception), expected)
p.model._raise_connection_errors = False
with assert_warning(UserWarning, expected):
p.setup()
def test_double_promote_one_conn(self):
p = om.Problem()
gouter = p.model.add_subsystem('gouter', om.Group())
gouter.add_subsystem('couter', om.ExecComp('xx = a * 3.'))
g = gouter.add_subsystem('g', om.Group(), promotes_inputs=[('x', 'xx')])
g.add_subsystem('ivc', om.IndepVarComp('x', 2.), promotes_outputs=['x'])
g.add_subsystem('c0', om.ExecComp('y = 2*x'), promotes_inputs=['x'])
p.setup()
self.assertEqual(p.model._conn_global_abs_in2out['gouter.g.c0.x'], 'gouter.g.ivc.x')
def test_hide_group_input(self):
p = om.Problem()
g1 = p.model.add_subsystem('g1', om.Group())
g2 = g1.add_subsystem('g2', om.Group(), promotes=['g3.c1.x']) # make g2 disappear using promotes
g3 = g2.add_subsystem('g3', om.Group())
c1 = g3.add_subsystem('c1', om.ExecComp('y=2.*x', x=2.))
g3_ = g1.add_subsystem('g3', om.Group(), promotes=['x']) # second g3, but directly under g1
c1_ = g3_.add_subsystem('c1', om.ExecComp('y=3.*x', x=3.), promotes=['x'])
with self.assertRaises(Exception) as cm:
p.setup()
self.assertEqual(cm.exception.args[0], f"{p.model.msginfo}: Absolute variable name 'g1.g3.c1.x'"
" is masked by a matching promoted name. Try"
" promoting to a different name. This can be caused"
" by promoting '*' at group level or promoting using"
" dotted names.")
def test_hide_group_output(self):
p = om.Problem()
g1 = p.model.add_subsystem('g1', om.Group())
g2 = g1.add_subsystem('g2', om.Group(), promotes=['g3.c1.y']) # make g2 disappear using promotes
g3 = g2.add_subsystem('g3', om.Group())
c1 = g3.add_subsystem('c1', om.ExecComp('y=2.*x', x=2.))
g3_ = g1.add_subsystem('g3', om.Group(), promotes=['y']) # second g3, but directly under g1
c1_ = g3_.add_subsystem('c1', om.ExecComp('y=3.*x', x=3.), promotes=['y'])
with self.assertRaises(Exception) as cm:
p.setup()
self.assertEqual(cm.exception.args[0], f"{p.model.msginfo}: Absolute variable name 'g1.g3.c1.y'"
" is masked by a matching promoted name. Try"
" promoting to a different name. This can be caused"
" by promoting '*' at group level or promoting using"
" dotted names.")
def test_invalid_subsys_name(self):
p = om.Problem()
# name cannot start with an underscore
with self.assertRaises(Exception) as err:
p.model.add_subsystem('_bad_name', om.Group())
self.assertEqual(str(err.exception),
"Group: '_bad_name' is not a valid sub-system name.")
def test_subsys_attributes(self):
p = om.Problem()
class MyGroup(om.Group):
def setup(self):
# two subsystems added during setup
self.add_subsystem('comp1', om.ExecComp('b=2.0*a', a=3.0, b=6.0))
self.add_subsystem('comp2', om.ExecComp('b=3.0*a', a=4.0, b=12.0))
# subsystems become attributes
my_group = p.model.add_subsystem('gg', MyGroup())
self.assertTrue(p.model.gg is my_group)
# after calling setup(), MyGroup's subsystems are also attributes
p.setup()
self.assertTrue(hasattr(p.model.gg, 'comp1'))
self.assertTrue(hasattr(p.model.gg, 'comp2'))
# calling setup() again doesn't break anything
p.setup()
self.assertTrue(p.model.gg is my_group)
self.assertTrue(hasattr(p.model.gg, 'comp1'))
self.assertTrue(hasattr(p.model.gg, 'comp2'))
# 'name', 'pathname', 'comm' and 'options' are reserved names
p = om.Problem()
for reserved in ['name', 'pathname', 'comm', 'options']:
with self.assertRaises(Exception) as err:
p.model.add_subsystem(reserved, om.Group())
self.assertEqual(str(err.exception),
"Group: Can't add subsystem '%s' because an attribute with that name already exits." %
reserved)
def test_group_promotes(self):
"""Promoting a single variable."""
p = om.Problem()
ivc = om.IndepVarComp()
ivc.add_output('a', 2.0)
ivc.add_output('x', 5.0)
p.model.add_subsystem('comp1', ivc, promotes_outputs=['x'])
p.model.add_subsystem('comp2', om.ExecComp('y=2*x'), promotes_inputs=['x'])
p.setup()
p.set_solver_print(level=0)
p.run_model()
self.assertEqual(p['comp1.a'], 2)
self.assertEqual(p['x'], 5)
self.assertEqual(p['comp2.y'], 10)
def test_group_renames(self):
p = om.Problem()
p.model.add_subsystem('comp1', om.IndepVarComp('x', 5.0),
promotes_outputs=[('x', 'foo')])
p.model.add_subsystem('comp2', om.ExecComp('y=2*foo'), promotes_inputs=['foo'])
p.setup()
p.set_solver_print(level=0)
p.run_model()
self.assertEqual(p['foo'], 5)
self.assertEqual(p['comp2.y'], 10)
def test_group_renames_errors_single_string(self):
p = om.Problem()
with self.assertRaises(Exception) as err:
p.model.add_subsystem('comp1', om.IndepVarComp('x', 5.0),
promotes_outputs='x')
self.assertEqual(str(err.exception),
"Group: promotes must be an iterator of strings and/or tuples.")
def test_group_renames_errors_not_found(self):
p = om.Problem()
p.model.add_subsystem('comp1', om.IndepVarComp('x', 5.0),
promotes_outputs=[('xx', 'foo')])
p.model.add_subsystem('comp2', om.ExecComp('y=2*foo'), promotes_inputs=['foo'])
with self.assertRaises(Exception) as err:
p.setup()
self.assertEqual(str(err.exception),
"'comp1' <class IndepVarComp>: 'promotes_outputs' failed to find any matches for "
"the following names or patterns: ['xx'].")
def test_group_renames_errors_bad_tuple(self):
p = om.Problem()
p.model.add_subsystem('comp1', om.IndepVarComp('x', 5.0),
promotes_outputs=[('x', 'foo', 'bar')])
p.model.add_subsystem('comp2', om.ExecComp('y=2*foo'), promotes_inputs=['foo'])
with self.assertRaises(Exception) as err:
p.setup()
self.assertEqual(str(err.exception),
"when adding subsystem 'comp1', entry '('x', 'foo', 'bar')' "
"is not a string or tuple of size 2")
def test_group_promotes_multiple(self):
"""Promoting multiple variables."""
p = om.Problem()
ivc = om.IndepVarComp()
ivc.add_output('a', 2.0)
ivc.add_output('x', 5.0)
p.model.add_subsystem('comp1', ivc, promotes_outputs=['a', 'x'])
p.model.add_subsystem('comp2', om.ExecComp('y=2*x'),
promotes_inputs=['x'])
p.setup()
p.set_solver_print(level=0)
p.run_model()
self.assertEqual(p['a'], 2)
self.assertEqual(p['x'], 5)
self.assertEqual(p['comp2.y'], 10)
def test_group_promotes_all(self):
"""Promoting all variables with asterisk."""
p = om.Problem()
ivc = om.IndepVarComp()
ivc.add_output('a', 2.0)
ivc.add_output('x', 5.0)
p.model.add_subsystem('comp1', ivc, promotes_outputs=['*'])
p.model.add_subsystem('comp2', om.ExecComp('y=2*x'),
promotes_inputs=['x'])
p.setup()
p.set_solver_print(level=0)
p.run_model()
self.assertEqual(p['a'], 2)
self.assertEqual(p['x'], 5)
self.assertEqual(p['comp2.y'], 10)
def test_group_promotes2(self):
class Sellar(om.Group):
def setup(self):
dv = self.add_subsystem('des_vars', om.IndepVarComp(), promotes=['*'])
dv.add_output('x', 1.0)
dv.add_output('z', np.array([5.0, 2.0]))
self.add_subsystem('d1', SellarDis2(),
promotes_inputs=['y1'], promotes_outputs=['foo'])
self.add_subsystem('d2', SellarDis2())
p = om.Problem()
p.model = Sellar()
with self.assertRaises(Exception) as err:
p.setup()
self.assertEqual(str(err.exception),
"'d1' <class SellarDis2>: 'promotes_outputs' failed to find any matches for "
"the following names or patterns: ['foo'].")
def test_group_nested_conn(self):
"""Example of adding subsystems and issuing connections with nested groups."""
g1 = om.Group()
c1_1 = g1.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
c1_2 = g1.add_subsystem('comp2', om.ExecComp('b=2*a'))
g1.connect('comp1.x', 'comp2.a')
g2 = om.Group()
c2_1 = g2.add_subsystem('comp1', om.ExecComp('b=2*a'))
c2_2 = g2.add_subsystem('comp2', om.ExecComp('b=2*a'))
g2.connect('comp1.b', 'comp2.a')
model = om.Group()
model.add_subsystem('group1', g1)
model.add_subsystem('group2', g2)
model.connect('group1.comp2.b', 'group2.comp1.a')
p = om.Problem(model=model)
p.setup()
c1_1 = p.model.group1.comp1
c1_2 = p.model.group1.comp2
c2_1 = p.model.group2.comp1
c2_2 = p.model.group2.comp2
self.assertEqual(c1_1.name, 'comp1')
self.assertEqual(c1_2.name, 'comp2')
self.assertEqual(c2_1.name, 'comp1')
self.assertEqual(c2_2.name, 'comp2')
c1_1 = p.model.group1.comp1
c1_2 = p.model.group1.comp2
c2_1 = p.model.group2.comp1
c2_2 = p.model.group2.comp2
self.assertEqual(c1_1.name, 'comp1')
self.assertEqual(c1_2.name, 'comp2')
self.assertEqual(c2_1.name, 'comp1')
self.assertEqual(c2_2.name, 'comp2')
s = p.model._get_subsystem('')
self.assertEqual(s, p.model)
p.set_solver_print(level=0)
p.run_model()
self.assertEqual(p['group1.comp1.x'], 5.0)
self.assertEqual(p['group1.comp2.b'], 10.0)
self.assertEqual(p['group2.comp1.b'], 20.0)
self.assertEqual(p['group2.comp2.b'], 40.0)
def test_reused_output_promoted_names(self):
prob = om.Problem()
prob.model.add_subsystem('px1', om.IndepVarComp('x1', 100.0))
G1 = prob.model.add_subsystem('G1', om.Group())
G1.add_subsystem("C1", om.ExecComp("y=2.0*x"), promotes=['y'])
G1.add_subsystem("C2", om.ExecComp("y=2.0*x"), promotes=['y'])
msg = r"Output name 'y' refers to multiple outputs: \['G1.C1.y', 'G1.C2.y'\]."
with self.assertRaisesRegex(Exception, msg):
prob.setup()
def test_unconnected_input_units_no_mismatch(self):
p = om.Problem()
p.model.add_subsystem('comp1', om.ExecComp('y=sum(x)',
x={'value': np.zeros(5), 'units': 'ft'},
y={'units': 'inch'}), promotes=['x'])
p.model.add_subsystem('comp2', om.ExecComp('y=sum(x)',
x={'value': np.zeros(5), 'units': 'ft'},
y={'units': 'inch'}), promotes=['x'])
p.setup()
p['comp2.x'] = np.ones(5)
p.run_model()
np.testing.assert_allclose(p['comp1.y'], 5.)
np.testing.assert_allclose(p['comp2.y'], 5.)
def test_unconnected_input_units_mismatch(self):
p = om.Problem()
p.model.add_subsystem('comp1', om.ExecComp('y=sum(x)',
x={'value': np.zeros(5), 'units': 'inch'},
y={'units': 'inch'}), promotes=['x'])
p.model.add_subsystem('comp2', om.ExecComp('y=sum(x)',
x={'value': np.zeros(5), 'units': 'ft'},
y={'units': 'inch'}), promotes=['x'])
p.model.set_input_defaults('x', units='ft')
p.setup()
p['comp2.x'] = np.ones(5)
p.run_model()
np.testing.assert_allclose(p['comp1.y'], 60.)
np.testing.assert_allclose(p['comp2.y'], 5.)
def test_double_src_indices(self):
class SlicerComp(om.ExplicitComponent):
def setup(self):
self.add_input('x', np.ones(3), src_indices=[0, 1, 2])
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', np.ones(5)))
p.model.add_subsystem('C1', SlicerComp())
p.model.connect('indep.x', 'C1.x', src_indices=[1, 0, 2])
with self.assertRaises(Exception) as context:
p.setup()
self.assertEqual(str(context.exception),
"<model> <class Group>: src_indices has been defined in both "
"connect('indep.x', 'C1.x') and add_input('C1.x', ...).")
def test_incompatible_src_indices_error(self):
class ControlInterpComp(om.ExplicitComponent):
def setup(self):
self.add_output('x', shape=(3, 1))
class CollocationComp(om.ExplicitComponent):
def setup(self):
self.add_input('x', shape=(1, 2))
class Phase(om.Group):
def setup(self):
self.add_subsystem('comp1', ControlInterpComp())
self.add_subsystem('comp2', CollocationComp())
self.connect('comp1.x', 'comp2.x', src_indices=[1])
p = om.Problem()
p.model.add_subsystem('phase', Phase())
msg = "'phase' <class Phase>: src_indices shape (1,) does not match phase.comp2.x shape (1, 2)."
with self.assertRaises(ValueError) as context:
p.setup()
self.assertEqual(str(context.exception), msg)
p.model._raise_connection_errors = False
with assert_warning(UserWarning, msg):
p.setup()
def test_connect_to_flat_array_with_slice(self):
class SlicerComp(om.ExplicitComponent):
def setup(self):
self.add_input('x', np.ones((12,)))
self.add_output('y', 1.0)
def compute(self, inputs, outputs):
outputs['y'] = np.sum(inputs['x']) ** 2.0
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_large_4x4))
p.model.add_subsystem('row123_comp', SlicerComp())
idxs = np.array([0, 2, 3], dtype=int)
p.model.connect('indep.x', 'row123_comp.x', src_indices=om.slicer[idxs, ...])
p.setup()
p.run_model()
assert_near_equal(p['row123_comp.x'], arr_large_4x4[(0, 2, 3), ...].ravel())
assert_near_equal(p['row123_comp.y'], np.sum(arr_large_4x4[(0, 2, 3), ...]) ** 2.0)
def test_connect_to_flat_src_indices_with_slice_user_warning(self):
class SlicerComp(om.ExplicitComponent):
def setup(self):
self.add_input('x', np.ones((12,)))
self.add_output('y', 1.0)
def compute(self, inputs, outputs):
outputs['y'] = np.sum(inputs['x']) ** 2.0
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_large_4x4))
p.model.add_subsystem('row123_comp', SlicerComp())
idxs = np.array([0, 2, 3], dtype=int)
p.model.connect('indep.x', 'row123_comp.x', src_indices=om.slicer[idxs, ...],
flat_src_indices=True)
msg = "<model> <class Group>: Connection from 'indep.x' to 'row123_comp.x' was added with slice src_indices, so flat_src_indices is ignored."
with assert_warning(UserWarning, msg):
p.setup()
p.run_model()
assert_near_equal(p['row123_comp.x'], arr_large_4x4[(0, 2, 3), ...].ravel())
assert_near_equal(p['row123_comp.y'], np.sum(arr_large_4x4[(0, 2, 3), ...]) ** 2.0)
def test_connect_to_flat_array(self):
class SlicerComp(om.ExplicitComponent):
def setup(self):
self.add_input('x', np.ones((4,)))
self.add_output('y', 1.0)
def compute(self, inputs, outputs):
outputs['y'] = np.sum(inputs['x'])
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', val=arr_large_4x4))
p.model.add_subsystem('trace_comp', SlicerComp())
idxs = np.array([0, 5, 10, 15], dtype=int)
p.model.connect('indep.x', 'trace_comp.x', src_indices=idxs, flat_src_indices=True)
p.setup()
p.run_model()
assert_near_equal(p['trace_comp.x'], np.diag(arr_large_4x4))
assert_near_equal(p['trace_comp.y'], np.sum(np.diag(arr_large_4x4)))
def test_om_slice_in_connect(self):
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_order_4x4))
p.model.add_subsystem('C1', SlicerComp())
p.model.connect('indep.x', 'C1.x', src_indices=om.slicer[:, 1])
p.setup()
p.run_model()
assert_near_equal(p['C1.x'], np.array([2, 2, 2, 2]))
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_order_4x4))
p.model.add_subsystem('C1', SlicerComp())
p.model.connect('indep.x', 'C1.x', src_indices=om.slicer[:, 1], flat_src_indices=True)
p.setup()
p.run_model()
assert_near_equal(p['C1.x'], np.array([2, 2, 2, 2]))
def test_om_slice_in_promotes(self):
p = om.Problem()
model = p.model
model.add_subsystem('indep', om.IndepVarComp('a', arr_order_3x3), promotes=['*'])
model.add_subsystem('comp1', om.ExecComp('b=2*a', a=np.ones(3), b=np.ones(3)))
model.promotes('comp1', inputs=['a'], src_indices=om.slicer[:, 1])
p.setup()
p.run_model()
assert_near_equal(p['comp1.a'], [2, 2, 2])
p = om.Problem()
model = p.model
model.add_subsystem('indep', om.IndepVarComp('a', arr_order_3x3), promotes=['*'])
model.add_subsystem('comp1', om.ExecComp('b=2*a', a=np.ones(3), b=np.ones(3)))
model.promotes('comp1', inputs=['a'], src_indices=om.slicer[:, 1], flat_src_indices=True)
p.setup()
p.run_model()
assert_near_equal(p['comp1.a'], [2, 2, 2])
def test_desvar_indice_slice(self):
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_order_1x1))
p.model.add_subsystem('C1', SlicerComp())
p.model.connect('indep.x', 'C1.x')
p.model.add_design_var('indep.x', indices=om.slicer[2:])
p.model.add_objective('C1.y')
p.setup()
p.run_model()
assert_near_equal(arr_order_1x1[p.model._design_vars['indep.x']['indices']], np.array([3., 4]))
def test_om_slice_in_add_response(self):
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_order_1x1))
p.model.add_subsystem('C1', SlicerComp())
p.model.connect('indep.x', 'C1.x')
p.model.add_response('indep.x', type_='con', indices=om.slicer[2:])
p.model.add_objective('C1.y')
p.setup()
p.run_model()
assert_near_equal(arr_order_1x1[tuple(p.model._responses['indep.x']['indices'])], np.array([3, 4]))
self.assertTrue(p.model._responses['indep.x']['indices'][0], slice(2, None, None))
def test_om_slice_in_add_constraint(self):
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_order_1x1))
p.model.add_subsystem('C1', SlicerComp())
p.model.connect('indep.x', 'C1.x')
p.model.add_constraint('indep.x', indices=om.slicer[2:])
p.model.add_objective('C1.y')
p.setup()
p.run_model()
assert_near_equal(arr_order_1x1[tuple(p.model._responses['indep.x']['indices'])], np.array([3, 4]))
self.assertTrue(p.model._responses['indep.x']['indices'][0], slice(2, None, None))
def test_om_slice_in_add_input(self):
class SlicerComp(om.ExplicitComponent):
def setup(self):
self.add_input('x', np.ones(4), src_indices=om.slicer[:, 2])
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_order_4x4))
p.model.add_subsystem('C1', SlicerComp())
p.model.connect('indep.x', 'C1.x')
p.setup()
p.run_model()
assert_near_equal(p['C1.x'], np.array([3, 3, 3, 3]))
def test_om_slice_negative_stop(self):
class SlicerComp(om.ExplicitComponent):
def setup(self):
self.add_input('x', np.ones(4), src_indices=om.slicer[:, -1])
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_order_4x4))
p.model.add_subsystem('C1', SlicerComp())
p.model.connect('indep.x', 'C1.x')
p.setup()
p.run_model()
assert_near_equal(p['C1.x'], np.array([4, 4, 4, 4]))
def test_om_slice_3d(self):
class SlicerComp(om.ExplicitComponent):
def setup(self):
self.add_input('x', np.ones(4), src_indices=om.slicer[:, 1, 2])
arr = np.arange(64, dtype=int).reshape(4, 4, 4)
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr))
p.model.add_subsystem('C1', SlicerComp())
p.model.connect('indep.x', 'C1.x')
p.setup()
p.run_model()
assert_near_equal(p['C1.x'], np.array([6, 22, 38, 54]))
def test_om_slice_with_ellipsis_in_connect(self):
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_large_4x4))
p.model.add_subsystem('row1_comp', SlicerComp())
p.model.add_subsystem('row4_comp', SlicerComp())
p.model.connect('indep.x', 'row1_comp.x', src_indices=om.slicer[0, ...])
p.model.connect('indep.x', 'row4_comp.x', src_indices=om.slicer[3, ...])
p.setup()
p.run_model()
assert_near_equal(p['row1_comp.x'], arr_large_4x4[0, ...])
assert_near_equal(p['row4_comp.x'], arr_large_4x4[3, ...])
def test_om_slice_4D_with_ellipsis(self):
class SlicerComp(om.ExplicitComponent):
def setup(self):
self.add_input('x', np.ones(shape=(5, 3)))
# Connect
p = om.Problem()
arr = np.random.randint(5, size=(3, 5, 3, 2))
p.model.add_subsystem('indep', om.IndepVarComp('x', arr))
p.model.add_subsystem('row1_comp', SlicerComp())
p.model.add_subsystem('row4_comp', SlicerComp())
p.model.connect('indep.x', 'row1_comp.x', src_indices=om.slicer[1, ..., 1])
p.model.connect('indep.x', 'row4_comp.x', src_indices=om.slicer[2, ..., 1])
p.setup()
p.run_model()
assert_near_equal(p.get_val('row1_comp.x'), arr[1, ..., 1])
assert_near_equal(p.get_val('row4_comp.x'), arr[2, ..., 1])
# Promotes
p = om.Problem()
arr = np.random.randint(5, size=(3, 5, 3, 2))
p.model.add_subsystem('indep', om.IndepVarComp('a', arr), promotes=['*'])
p.model.add_subsystem('row1_comp', om.ExecComp('b=2*a', a=np.ones((5,3)), b=np.ones((5,3))))
p.model.promotes('row1_comp', inputs=['a'], src_indices=om.slicer[1, ..., 1])
p.setup()
p.run_model()
assert_near_equal(p.get_val('row1_comp.a'), arr[1, ..., 1])
# Design Variable
p = om.Problem()
arr = np.random.randint(5, size=(3, 5, 3, 2))
p.model.add_subsystem('indep', om.IndepVarComp('a', arr))
p.model.add_subsystem('row1_comp', om.ExecComp('b=2*a', a=np.ones((3,5,3)), b=np.ones((3,5,3))))
p.model.connect('indep.a', 'row1_comp.a', src_indices=om.slicer[..., 1])
p.model.add_design_var('indep.a', indices=om.slicer[1, ..., 1])
p.setup()
p.run_model()
assert_near_equal(arr[p.model._design_vars['indep.a']['indices']], arr[1, ..., 1])
# Response
p = om.Problem()
arr = np.random.randint(5, size=(3, 5, 3, 2))
p.model.add_subsystem('indep', om.IndepVarComp('a', arr))
p.model.add_subsystem('row1_comp', om.ExecComp('b=2*a', a=np.ones((3,5,3)), b=np.ones((3,5,3))))
p.model.connect('indep.a', 'row1_comp.a', src_indices=om.slicer[..., 1])
p.model.add_response('indep.a', type_='con', indices=om.slicer[1, ..., 1])
p.setup()
p.run_model()
assert_near_equal(arr[tuple(p.model._responses['indep.a']['indices'])], arr[1, ..., 1])
# Constraint
p = om.Problem()
arr = np.random.randint(5, size=(3, 5, 3, 2))
p.model.add_subsystem('indep', om.IndepVarComp('a', arr))
p.model.add_subsystem('row1_comp', om.ExecComp('b=2*a', a=np.ones((3,5,3)), b=np.ones((3,5,3))))
p.model.connect('indep.a', 'row1_comp.a', src_indices=om.slicer[..., 1])
p.model.add_constraint('indep.a', indices=om.slicer[1, ..., 1])
p.setup()
p.run_model()
assert_near_equal(arr[tuple(p.model._responses['indep.a']['indices'])], arr[1, ..., 1])
    def test_om_slice_with_ellipsis_in_promotes(self):
        """Use om.slicer with an ellipsis as src_indices when promoting inputs."""
        p = om.Problem()
        model = p.model
        # arr_large_4x4 is a module-level 4x4 array; the assertions below imply
        # its entries are 10*row + col.
        model.add_subsystem('indep', om.IndepVarComp('a', arr_large_4x4), promotes=['*'])
        model.add_subsystem('comp1', om.ExecComp('b=2*a', a=np.ones(4), b=np.ones(4)))
        model.add_subsystem('comp2', om.ExecComp('b=2*a', a=np.ones(4), b=np.ones(4)))
        model.add_subsystem('comp3', om.ExecComp('b=2*a', a=np.ones(4), b=np.ones(4)))
        # row 0, row 3, and column 3 of the promoted source, respectively
        model.promotes('comp1', inputs=['a'], src_indices=om.slicer[0, ...])
        model.promotes('comp2', inputs=['a'], src_indices=om.slicer[3, ...])
        model.promotes('comp3', inputs=['a'], src_indices=om.slicer[..., 3])
        p.setup()
        p.run_model()
        assert_near_equal(p['comp1.a'], np.array([0, 1, 2, 3]))
        assert_near_equal(p['comp2.a'], np.array([30, 31, 32, 33]))
        assert_near_equal(p['comp3.a'], np.array([ 3, 13, 23, 33]))
    def test_om_slice_with_ellipsis_in_desvar(self):
        """Use om.slicer with an ellipsis as the `indices` of a design variable."""
        p = om.Problem()
        p.model.add_subsystem('indep', om.IndepVarComp('x', arr_2x4))
        p.model.add_subsystem('C1', SlicerComp())
        p.model.connect('indep.x', 'C1.x', src_indices=om.slicer[1, ...])
        p.model.add_design_var('indep.x', indices=om.slicer[1, ...])
        p.model.add_objective('C1.y')
        p.setup()
        p.run_model()
        # the stored indices (1, ...) select row 1 of arr_2x4; its first
        # element is expected to be 10
        assert_near_equal(arr_2x4[p.model._design_vars['indep.x']['indices']][0], 10)
def test_om_slice_with_ellipsis_in_add_response(self):
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_2x4))
p.model.add_subsystem('C1', SlicerComp())
p.model.connect('indep.x', 'C1.x', src_indices=om.slicer[1, ...])
p.model.add_response('indep.x', type_='con', indices=om.slicer[1, ...])
p.model.add_objective('C1.y')
p.setup()
p.run_model()
assert_near_equal(arr_2x4[tuple(p.model._responses['indep.x']['indices'])],
np.array([10, 11, 12, 13]))
self.assertTrue(p.model._responses['indep.x']['indices'][0], 1)
self.assertTrue(p.model._responses['indep.x']['indices'][1], ...)
def test_om_slice_with_ellipsis_in_add_constraint(self):
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', arr_2x4))
p.model.add_subsystem('C1', SlicerComp())
p.model.connect('indep.x', 'C1.x', src_indices=om.slicer[1, ...])
p.model.add_constraint('indep.x', indices=om.slicer[1, ...])
p.model.add_objective('C1.y')
p.setup()
p.run_model()
assert_near_equal(arr_2x4[tuple(p.model._responses['indep.x']['indices'])],
np.array([10, 11, 12, 13]))
self.assertTrue(p.model._responses['indep.x']['indices'][0], 1)
self.assertTrue(p.model._responses['indep.x']['indices'][1], ...)
def test_om_slice_with_ellipsis_auto_ivc(self):
# Add_constraint
p = om.Problem()
p.model.add_subsystem('C1', SlicerComp(), promotes_inputs=['x'])
p.model.add_constraint('x', indices=om.slicer[1, ...])
p.model.add_objective('C1.y')
p.setup()
p.set_val('x', arr_2x4, indices=om.slicer[1, ...])
p.run_model()
assert_near_equal(arr_2x4[tuple(p.model._responses['x']['indices'])],
np.array([10, 11, 12, 13]))
self.assertTrue(p.model._responses['x']['indices'][0], 1)
self.assertTrue(p.model._responses['x']['indices'][1], ...)
# Add_response
p = om.Problem()
p.model.add_subsystem('C1', SlicerComp(), promotes_inputs=['x'])
p.model.add_response('x', type_='con', indices=om.slicer[1, ...])
p.model.add_objective('C1.y')
p.setup()
p.set_val('x', arr_2x4, indices=om.slicer[1, ...])
p.run_model()
assert_near_equal(arr_2x4[tuple(p.model._responses['x']['indices'])],
np.array([10, 11, 12, 13]))
self.assertTrue(p.model._responses['x']['indices'][0], 1)
self.assertTrue(p.model._responses['x']['indices'][1], ...)
# Add_design_var
p = om.Problem()
p.model.add_subsystem('C1', SlicerComp(), promotes_inputs=['x'])
p.model.add_design_var('x', indices=om.slicer[1, ...])
p.setup()
p.set_val('x', arr_2x4, indices=om.slicer[1, ...])
p.run_model()
assert_near_equal(arr_2x4[p.model._design_vars['x']['indices']],
np.array([10, 11, 12, 13]))
self.assertTrue(p.model._design_vars['x']['indices'][0], 1)
self.assertTrue(p.model._design_vars['x']['indices'][1], ...)
self.assertTrue(p.driver.get_design_var_values()['x'], np.array(11.))
    def test_om_slice_with_indices_and_ellipsis_in_connect(self):
        """Mix an integer index array with an ellipsis in a connect slicer."""

        class SlicerComp(om.ExplicitComponent):

            def setup(self):
                self.add_input('x', np.ones((3, 4)))
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = np.sum(inputs['x']) ** 2.0

        p = om.Problem()
        p.model.add_subsystem('indep', om.IndepVarComp('x', arr_large_4x4))
        p.model.add_subsystem('row134_comp', SlicerComp())
        idxs = np.array([0, 2, 3], dtype=int)
        # fancy-index rows 0, 2, 3 of the source; the ellipsis keeps all columns
        p.model.connect('indep.x', 'row134_comp.x', src_indices=om.slicer[idxs, ...])
        p.setup()
        p.run_model()
        assert_near_equal(p['row134_comp.x'], arr_large_4x4[(0, 2, 3), ...])
        assert_near_equal(p['row134_comp.y'], np.sum(arr_large_4x4[(0, 2, 3), ...])**2)
def test_promote_not_found1(self):
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', np.ones(5)),
promotes_outputs=['x'])
p.model.add_subsystem('C1', om.ExecComp('y=x'), promotes_inputs=['x'])
p.model.add_subsystem('C2', om.ExecComp('y=x'), promotes_outputs=['x*'])
with self.assertRaises(Exception) as context:
p.setup()
self.assertEqual(str(context.exception),
"'C2' <class ExecComp>: 'promotes_outputs' failed to find any matches for the "
"following pattern: 'x*'.")
def test_promote_not_found2(self):
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', np.ones(5)),
promotes_outputs=['x'])
p.model.add_subsystem('C1', om.ExecComp('y=x'), promotes_inputs=['x'])
p.model.add_subsystem('C2', om.ExecComp('y=x'), promotes_inputs=['xx'])
with self.assertRaises(Exception) as context:
p.setup()
self.assertEqual(str(context.exception),
"'C2' <class ExecComp>: 'promotes_inputs' failed to find any matches for "
"the following names or patterns: ['xx'].")
def test_promote_not_found3(self):
p = om.Problem()
p.model.add_subsystem('indep', om.IndepVarComp('x', np.ones(5)),
promotes_outputs=['x'])
p.model.add_subsystem('C1', om.ExecComp('y=x'), promotes=['x'])
p.model.add_subsystem('C2', om.ExecComp('y=x'), promotes=['xx'])
with self.assertRaises(Exception) as context:
p.setup()
self.assertEqual(str(context.exception),
"'C2' <class ExecComp>: 'promotes' failed to find any matches for "
"the following names or patterns: ['xx'].")
def test_empty_group(self):
p = om.Problem()
g1 = p.model.add_subsystem('G1', om.Group(), promotes=['*'])
p.setup()
    def test_missing_promote_var(self):
        """promotes_inputs naming a variable the component lacks raises at setup."""
        p = om.Problem()
        indep_var_comp = om.IndepVarComp('z', val=2.)
        p.model.add_subsystem('indep_vars', indep_var_comp, promotes=['*'])
        # 'foo' is not an input of d1 (its inputs are 'z' and 'bar')
        p.model.add_subsystem('d1', om.ExecComp("y1=z+bar"),
                              promotes_inputs=['z', 'foo'])
        with self.assertRaises(Exception) as context:
            p.setup()
        self.assertEqual(str(context.exception),
                         "'d1' <class ExecComp>: 'promotes_inputs' failed to find any matches for "
                         "the following names or patterns: ['foo'].")
    def test_missing_promote_var2(self):
        """promotes_outputs with bad names raises; 'bar' is an input and 'blammo' does not exist."""
        p = om.Problem()
        indep_var_comp = om.IndepVarComp('z', val=2.)
        p.model.add_subsystem('indep_vars', indep_var_comp, promotes=['*'])
        p.model.add_subsystem('d1', om.ExecComp("y1=z+bar"),
                              promotes_outputs=['y1', 'blammo', ('bar', 'blah')])
        with self.assertRaises(Exception) as context:
            p.setup()
        self.assertEqual(str(context.exception),
                         "'d1' <class ExecComp>: 'promotes_outputs' failed to find any matches for "
                         "the following names or patterns: ['bar', 'blammo'].")
    def test_promote_src_indices_nonflat_to_scalars(self):
        """A nonflat (row, col) src_index promoting one element of a 2-D source."""

        class MyComp(om.ExplicitComponent):

            def setup(self):
                # src_indices=[(3, 1)] selects element (3, 1) of the promoted
                # source; shape=(1,) makes the local input a length-1 array
                self.add_input('x', 1.0, src_indices=[(3, 1)], shape=(1,))
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = inputs['x']*2.0

        p = om.Problem()
        p.model.add_subsystem('indep',
                              om.IndepVarComp('x', np.arange(12).reshape((4, 3))),
                              promotes_outputs=['x'])
        p.model.add_subsystem('C1', MyComp(), promotes_inputs=['x'])
        p.set_solver_print(level=0)
        p.setup()
        p.run_model()
        # element (3, 1) of arange(12).reshape(4, 3) is 10
        assert_near_equal(p['C1.x'], 10.)
        assert_near_equal(p['C1.y'], 20.)
    def test_promote_src_indices_nonflat_error(self):
        """Nonflat src_indices without an explicit input shape must raise."""

        class MyComp(om.ExplicitComponent):

            def setup(self):
                # no shape given, so the input shape is ambiguous
                self.add_input('x', 1.0, src_indices=[(3, 1)])
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = np.sum(inputs['x'])

        p = om.Problem()
        p.model.add_subsystem('indep',
                              om.IndepVarComp('x', np.arange(12).reshape((4, 3))),
                              promotes_outputs=['x'])
        p.model.add_subsystem('C1', MyComp(), promotes_inputs=['x'])
        with self.assertRaises(Exception) as context:
            p.setup()
        self.assertEqual(str(context.exception),
                         "src_indices for 'x' is not flat, so its input shape "
                         "must be provided. src_indices may contain an extra "
                         "dimension if the connected source is not flat, making "
                         "the input shape ambiguous.")
    @parameterized.expand(itertools.product(
        # (source shape, src index values selecting flat elements 0, 10, 7, 4)
        [((4, 3), [(0, 0), (3, 1), (2, 1), (1, 1)]),
         ((1, 12), [(0, 0), (0, 10), (0, 7), (0, 4)]),
         ((12,), [0, 10, 7, 4]),
         ((12, 1), [(0, 0), (10, 0), (7, 0), (4, 0)])],
        # target (input) shapes to reshape those 4 elements into
        [(2, 2), (4,), (4, 1), (1, 4)],
    ), name_func=lambda f, n, p: 'test_promote_src_indices_'+'_'.join(str(a) for a in p.args))
    def test_promote_src_indices_param(self, src_info, tgt_shape):
        """Promotion with src_indices across combinations of source/target shapes."""
        src_shape, idxvals = src_info

        class MyComp(om.ExplicitComponent):

            def setup(self):
                if len(tgt_shape) == 1:
                    tshape = None  # don't need to set shape if input is flat
                    sidxs = idxvals
                else:
                    # build a nested list of indices matching the 2-D target shape
                    tshape = tgt_shape
                    sidxs = []
                    i = 0
                    for r in range(tgt_shape[0]):
                        sidxs.append([])
                        for c in range(tgt_shape[1]):
                            sidxs[-1].append(idxvals[i])
                            i += 1
                self.add_input('x', np.ones(4).reshape(tgt_shape),
                               src_indices=sidxs, shape=tshape)
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = np.sum(inputs['x'])

        p = om.Problem()
        p.model.add_subsystem('indep',
                              om.IndepVarComp('x', np.arange(12).reshape(src_shape)),
                              promotes_outputs=['x'])
        p.model.add_subsystem('C1', MyComp(), promotes_inputs=['x'])
        p.set_solver_print(level=0)
        p.setup()
        p.run_model()
        # selected flat elements are always 0, 10, 7, 4 (sum 21)
        assert_near_equal(p['C1.x'],
                          np.array([0., 10., 7., 4.]).reshape(tgt_shape))
        assert_near_equal(p['C1.y'], 21.)
    def test_set_order(self):
        """Group.set_order reorders execution and validates its argument list."""
        order_list = []
        prob = om.Problem()
        model = prob.model
        model.nonlinear_solver = om.NonlinearRunOnce()
        # ReportOrderComp appends its own name to order_list when executed
        model.add_subsystem('C1', ReportOrderComp(order_list), promotes_inputs=['x'])
        model.add_subsystem('C2', ReportOrderComp(order_list))
        model.add_subsystem('C3', ReportOrderComp(order_list))
        model.connect('C1.y', 'C2.x')
        model.connect('C2.y', 'C3.x')
        prob.set_solver_print(level=0)
        self.assertEqual(['C1', 'C2', 'C3'],
                         [s.name for s, _ in model._static_subsystems_allprocs.values()])
        prob.setup()
        prob.set_val('x', 1.)
        prob.run_model()
        self.assertEqual(['C1', 'C2', 'C3'], order_list)
        order_list[:] = []
        # reorder after the first run; execution should follow the new order
        model.set_order(['C2', 'C1', 'C3'])
        prob.setup()
        prob.set_val('x', 1.)
        prob.run_model()
        self.assertEqual(['C2', 'C1', 'C3'], order_list)
        # Extra (unknown) name in the order list
        with self.assertRaises(ValueError) as cm:
            model.set_order(['C2', 'junk', 'C1', 'C3'])
        self.assertEqual(str(cm.exception),
                         "<model> <class Group>: subsystem(s) ['junk'] found in subsystem order but don't exist.")
        # Missing name
        with self.assertRaises(ValueError) as cm:
            model.set_order(['C2', 'C3'])
        self.assertEqual(str(cm.exception),
                         "<model> <class Group>: ['C1'] expected in subsystem order and not found.")
        # Extra and Missing names together
        with self.assertRaises(ValueError) as cm:
            model.set_order(['C2', 'junk', 'C1', 'junk2'])
        self.assertEqual(str(cm.exception),
                         "<model> <class Group>: ['C3'] expected in subsystem order and not found.\n"
                         "<model> <class Group>: subsystem(s) ['junk', 'junk2'] found in subsystem order "
                         "but don't exist.")
        # Duplicated names
        with self.assertRaises(ValueError) as cm:
            model.set_order(['C2', 'C1', 'C3', 'C1'])
        self.assertEqual(str(cm.exception),
                         "<model> <class Group>: Duplicate name(s) found in subsystem order list: ['C1']")
def test_set_order_init_subsystems(self):
prob = om.Problem()
model = prob.model
model.add_subsystem('indeps', om.IndepVarComp('x', 1.))
model.add_subsystem('G1', SetOrderGroup())
prob.setup()
prob.run_model()
# this test passes if it doesn't raise an exception
def test_guess_nonlinear_complex_step(self):
class Discipline(om.Group):
def setup(self):
self.add_subsystem('comp0', om.ExecComp('y=x**2'))
self.add_subsystem('comp1', om.ExecComp('z=2*external_input'),
promotes_inputs=['external_input'])
self.add_subsystem('balance', om.BalanceComp('x', lhs_name='y', rhs_name='z'),
promotes_outputs=['x'])
self.connect('comp0.y', 'balance.y')
self.connect('comp1.z', 'balance.z')
self.connect('x', 'comp0.x')
self.nonlinear_solver = om.NewtonSolver(iprint=2, solve_subsystems=True)
self.linear_solver = om.DirectSolver()
def guess_nonlinear(self, inputs, outputs, residuals):
if outputs._data.dtype == np.complex:
raise RuntimeError('Vector should not be complex when guess_nonlinear is called.')
# inputs are addressed using full path name, regardless of promotion
external_input = inputs['comp1.external_input']
# balance drives x**2 = 2*external_input
x_guess = (2*external_input)**.5
# outputs are addressed by the their promoted names
outputs['x'] = x_guess # perfect guess should converge in 0 iterations
p = om.Problem()
p.model.add_subsystem('parameters', om.IndepVarComp('input_value', 1.))
p.model.add_subsystem('discipline', Discipline())
p.model.connect('parameters.input_value', 'discipline.external_input')
p.setup(force_alloc_complex=True)
p.run_model()
self.assertEqual(p.model.nonlinear_solver._iter_count, 0)
assert_near_equal(p['discipline.x'], 1.41421356, 1e-6)
totals = p.check_totals(of=['discipline.comp1.z'], wrt=['parameters.input_value'], method='cs', out_stream=None)
for key, val in totals.items():
assert_near_equal(val['rel error'][0], 0.0, 1e-15)
    def test_set_order_in_config_error(self):
        """Calling set_order from configure() is illegal and must raise."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
                self.add_subsystem('comp2', om.ExecComp('b=2*a'))

            def configure(self):
                # illegal: set_order is not allowed during configure
                self.set_order(['C2', 'C1'])

        prob = om.Problem()
        model = prob.model
        model.add_subsystem('C1', SimpleGroup())
        model.add_subsystem('C2', SimpleGroup())
        msg = "'C1' <class SimpleGroup>: Cannot call set_order in the configure method"
        with self.assertRaises(RuntimeError) as cm:
            prob.setup()
        self.assertEqual(str(cm.exception), msg)
    def test_set_order_after_setup(self):
        """Calling set_order after setup, without re-running setup, must raise on run."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
                self.add_subsystem('comp2', om.ExecComp('b=2*a'))

        prob = om.Problem()
        model = prob.model
        model.add_subsystem('C1', SimpleGroup())
        model.add_subsystem('C2', SimpleGroup())
        prob.setup()
        # reordering invalidates the setup; running without a new setup fails
        prob.model.set_order(['C2', 'C1'])
        msg = "Problem: Cannot call set_order without calling setup after"
        with self.assertRaises(RuntimeError) as cm:
            prob.run_model()
        self.assertEqual(str(cm.exception), msg)
def test_set_order_normal(self):
class SimpleGroup(om.Group):
def setup(self):
self.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
self.add_subsystem('comp2', om.ExecComp('b=2*a'))
prob = om.Problem()
model = prob.model
model.add_subsystem('C1', SimpleGroup())
model.add_subsystem('C2', SimpleGroup())
prob.model.set_order(['C2', 'C1'])
prob.setup()
prob.run_model()
def test_double_setup_for_set_order(self):
class SimpleGroup(om.Group):
def setup(self):
self.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
self.add_subsystem('comp2', om.ExecComp('b=2*a'))
prob = om.Problem()
model = prob.model
model.add_subsystem('C1', SimpleGroup())
model.add_subsystem('C2', SimpleGroup())
prob.setup()
model.set_order(['C2', 'C1'])
prob.setup()
prob.run_model()
@unittest.skipUnless(MPI, "MPI is required.")
class TestGroupMPISlice(unittest.TestCase):
    """Tests that om.slicer src_indices behave correctly when run under MPI."""

    N_PROCS = 2

    def test_om_slice_2d_mpi(self):
        """Slice a column of a 2-D source into a distributed input."""

        class MyComp1(om.ExplicitComponent):

            def initialize(self):
                self.options['distributed'] = True

            def setup(self):
                # column 2 of the connected 2-D source
                self.add_input('x', np.ones(4), src_indices=om.slicer[:, 2])
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = np.sum(inputs['x'])*2.0

        arr = np.array([[1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4]])
        p = om.Problem()
        p.model.add_subsystem('indep', om.IndepVarComp('x', arr))
        p.model.add_subsystem('C1', MyComp1())
        p.model.connect('indep.x', 'C1.x')
        p.setup()
        p.run_model()
        # check only the local (non-remote) portion on this proc
        val = p.get_val('C1.x', get_remote=False)
        assert_near_equal(val, np.array([3, 3, 3, 3]))

    def test_om_slice_3d_mpi(self):
        """Slice with two fixed trailing indices of a 3-D source under MPI."""

        class MyComp1(om.ExplicitComponent):

            def initialize(self):
                self.options['distributed'] = True

            def setup(self):
                # arr[:, 1, 2] of the connected 3-D source
                self.add_input('x', np.ones(4), src_indices=om.slicer[:, 1, 2])
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = np.sum(inputs['x'])*2.0

        arr = np.arange(64, dtype=int).reshape(4, 4, 4)
        p = om.Problem()
        p.model.add_subsystem('indep', om.IndepVarComp('x', arr))
        p.model.add_subsystem('C1', MyComp1())
        p.model.connect('indep.x', 'C1.x')
        p.setup()
        p.run_model()
        assert_near_equal(p['C1.x'], np.array([6, 22, 38, 54]))

    def test_om_slice_4D_with_ellipsis_mpi(self):
        """Ellipsis slicer on a 4-D source in connect, run under MPI."""

        class SlicerComp(om.ExplicitComponent):

            def setup(self):
                # only the input is needed here; the test checks input values
                self.add_input('x', np.ones(shape=(5, 3)))

        # Connect
        p = om.Problem()
        arr = np.random.randint(5, size=(3, 5, 3, 2))
        p.model.add_subsystem('indep', om.IndepVarComp('x', arr))
        p.model.add_subsystem('row1_comp', SlicerComp())
        p.model.add_subsystem('row4_comp', SlicerComp())
        p.model.connect('indep.x', 'row1_comp.x', src_indices=om.slicer[1, ..., 1])
        p.model.connect('indep.x', 'row4_comp.x', src_indices=om.slicer[2, ..., 1])
        p.setup()
        p.run_model()
        assert_near_equal(p.get_val('row1_comp.x'), arr[1, ..., 1])
        assert_near_equal(p.get_val('row4_comp.x'), arr[2, ..., 1])

    def test_om_slice_negative_stop_mpi(self):
        """Negative index in a slicer (last column) under MPI."""

        class MyComp1(om.ExplicitComponent):

            def initialize(self):
                self.options['distributed'] = True

            def setup(self):
                # last column of the connected 2-D source
                self.add_input('x', np.ones(4), src_indices=om.slicer[:,-1])
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = np.sum(inputs['x'])*2.0

        arr = np.array([[1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4]])
        p = om.Problem()
        p.model.add_subsystem('indep', om.IndepVarComp('x', arr))
        p.model.add_subsystem('C1', MyComp1())
        p.model.connect('indep.x', 'C1.x')
        p.setup()
        p.run_model()
        assert_near_equal(p['C1.x'], np.array([4, 4, 4, 4]))
class TestGroupPromotes(unittest.TestCase):
    """Tests of Group.promotes() called from configure(): aliases, wildcards,
    src_indices variants, and the associated error conditions."""

    def test_promotes_outputs_in_config(self):
        """Promote an output from configure(); unpromoted names stay hidden."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
                self.add_subsystem('comp2', om.ExecComp('b=2*a'))

            def configure(self):
                self.promotes('comp2', outputs=['b'])

        top = om.Problem(model=SimpleGroup())
        top.setup()
        self.assertEqual(top['b'], 1)
        with self.assertRaises(KeyError) as cm:
            top['a']
        self.assertEqual(str(cm.exception),
                         "'<model> <class SimpleGroup>: Variable \"a\" not found.'")

    def test_promotes_inputs_in_config(self):
        """Promote an input from configure(); unpromoted names stay hidden."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
                self.add_subsystem('comp2', om.ExecComp('b=2*a'))

            def configure(self):
                self.promotes('comp2', inputs=['a'])

        top = om.Problem(model=SimpleGroup())
        top.setup()
        self.assertEqual(top['a'], 1)
        with self.assertRaises(KeyError) as cm:
            top['b']
        self.assertEqual(str(cm.exception),
                         "'<model> <class SimpleGroup>: Variable \"b\" not found.'")

    def test_promotes_any_in_config(self):
        """Promote everything from one component with any=['*']."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
                self.add_subsystem('comp2', om.ExecComp('b=2*a'))

            def configure(self):
                self.promotes('comp1', any=['*'])

        top = om.Problem(model=SimpleGroup())
        top.setup()
        self.assertEqual(top['x'], 5)
        with self.assertRaises(KeyError) as cm:
            top['a']
        self.assertEqual(str(cm.exception),
                         "'<model> <class SimpleGroup>: Variable \"a\" not found.'")

    def test_promotes_alias(self):
        """An alias applied by a parent promotes the subgroup's promoted name."""

        class SubGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp1', om.ExecComp('x=2.0*a+3.0*b', a=3.0, b=4.0))

            def configure(self):
                self.promotes('comp1', inputs=['a'])

        class TopGroup(om.Group):

            def setup(self):
                self.add_subsystem('sub', SubGroup())

            def configure(self):
                self.sub.promotes('comp1', inputs=['b'])
                self.promotes('sub', inputs=[('b', 'bb')])

        top = om.Problem(model=TopGroup())
        top.setup()
        self.assertEqual(top['bb'], 4.0)

    def test_promotes_alias_from_parent(self):
        """Re-promoting a name that was already aliased must raise."""

        class SubGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp1', om.ExecComp('x=2.0*a+3.0*b+c', a=3.0, b=4.0))

            def configure(self):
                self.promotes('comp1', inputs=[('b', 'bb')])

        class TopGroup(om.Group):

            def setup(self):
                self.add_subsystem('sub', SubGroup())

            def configure(self):
                # 'b' was already aliased to 'bb' inside SubGroup
                self.sub.promotes('comp1', inputs=['b'])

        top = om.Problem(model=TopGroup())
        with self.assertRaises(RuntimeError) as context:
            top.setup()
        self.assertEqual(str(context.exception),
                         "'sub' <class SubGroup>: Trying to promote 'b' when it has been aliased to 'bb'.")

    def test_promotes_alias_src_indices(self):
        """Aliased promotes with distinct src_indices per target component."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('indep', om.IndepVarComp(), promotes=['*'])
                self.add_subsystem('comp1', om.ExecComp('b=2*a', a=np.ones(3), b=np.ones(3)))
                self.add_subsystem('comp2', om.ExecComp('b=4*a', a=np.ones(2), b=np.ones(2)))

            def configure(self):
                self.indep.add_output('x', np.array(range(5)))
                self.promotes('comp1', inputs=[('a', 'x')], src_indices=[0, 1, 2])
                self.promotes('comp2', inputs=[('a', 'x')], src_indices=[3, 4])

        p = om.Problem(model=SimpleGroup())
        p.setup()
        p.run_model()
        assert_near_equal(p['comp1.b'], np.array([0, 2, 4]))
        assert_near_equal(p['comp2.b'], np.array([12, 16]))

    def test_promotes_wildcard_rename(self):
        """Renaming a variable already promoted via a wildcard must raise."""

        class SubGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp1', om.ExecComp('x=2.0+bb', bb=4.0))

            def configure(self):
                self.promotes('comp1', inputs=["b*"])

        class TopGroup(om.Group):

            def setup(self):
                self.add_subsystem('sub', SubGroup())

            def configure(self):
                self.sub.promotes('comp1', inputs=[('bb', 'xx')])

        top = om.Problem(model=TopGroup())
        with self.assertRaises(RuntimeError) as context:
            top.setup()
        self.assertEqual(str(context.exception),
                         "'sub.comp1' <class ExecComp>: promotes_inputs 'b*' matched 'bb' but 'bb' has been "
                         "aliased to 'xx'.")

    def test_promotes_wildcard_name(self):
        """Promoting by exact name after a wildcard match is allowed."""

        class SubGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp1', om.ExecComp('x=2.0+bb', bb=4.0))

            def configure(self):
                self.promotes('comp1', inputs=["b*"])

        class TopGroup(om.Group):

            def setup(self):
                self.add_subsystem('sub', SubGroup())

            def configure(self):
                self.sub.promotes('comp1', inputs=['bb'])

        top = om.Problem(model=TopGroup())
        top.setup()

    def test_multiple_promotes(self):
        """Chained promotes through nested groups expose names at higher levels."""

        class BranchGroup(om.Group):

            def setup(self):
                b1 = self.add_subsystem('Branch1', om.Group())
                g1 = b1.add_subsystem('G1', om.Group())
                g2 = g1.add_subsystem('G2', om.Group())
                g2.add_subsystem('comp1', om.ExecComp('b=2.0*a', a=3.0, b=6.0))

            def configure(self):
                self.Branch1.G1.G2.promotes('comp1', inputs=['a'])
                self.Branch1.G1.promotes('G2', any=['*'])

        top = om.Problem(model=BranchGroup())
        top.setup()
        self.assertEqual(top['Branch1.G1.a'], 3)
        self.assertEqual(top['Branch1.G1.comp1.b'], 6)
        with self.assertRaises(KeyError) as cm:
            top['Branch1.G1.comp1.a']
        self.assertEqual(str(cm.exception),
                         "'<model> <class BranchGroup>: Variable \"Branch1.G1.comp1.a\" not found.'")

    def test_multiple_promotes_collision(self):
        """Promoting different names at different levels does not collide."""

        class SubGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp1', om.ExecComp('x=2.0*a+3.0*b', a=3.0, b=4.0))

            def configure(self):
                self.promotes('comp1', inputs=['a'])

        class TopGroup(om.Group):

            def setup(self):
                self.add_subsystem('sub', SubGroup())

            def configure(self):
                self.sub.promotes('comp1', inputs=['b'])
                self.promotes('sub', inputs=['b'])

        top = om.Problem(model=TopGroup())
        top.setup()
        self.assertEqual(top['sub.a'], 3)
        self.assertEqual(top['b'], 4)

    def test_multiple_promotes_src_indices(self):
        """Two aliased inputs sharing one source and one src_indices list."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('indep', om.IndepVarComp(), promotes=['*'])
                self.add_subsystem('comp1', om.ExecComp('c=2*a*b', a=np.ones(3), b=np.ones(3), c=np.ones(3)))

            def configure(self):
                self.indep.add_output('x', np.array(range(5)))
                self.promotes('comp1', inputs=[('a', 'x'), ('b', 'x')], src_indices=[0, 2, 4])

        p = om.Problem(model=SimpleGroup())
        p.setup()
        p.run_model()
        assert_near_equal(p['x'], np.array([0, 1, 2, 3, 4]))
        assert_near_equal(p['comp1.a'], np.array([0, 2, 4]))
        assert_near_equal(p['comp1.b'], np.array([0, 2, 4]))
        assert_near_equal(p['comp1.c'], np.array([0, 8, 32]))

    def test_promotes_src_indices_flat(self):
        """flat_src_indices selects flat elements of a 2-D source."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('indep', om.IndepVarComp('a', np.array(range(9)).reshape(3, 3)), promotes=['*'])
                self.add_subsystem('comp1', om.ExecComp('b=2*a', a=np.ones(3), b=np.ones(3)))

            def configure(self):
                # flat indices 0, 4, 8 are the diagonal of the 3x3 source
                self.promotes('comp1', inputs=['a'], src_indices=[0, 4, 8], flat_src_indices=True)

        p = om.Problem(model=SimpleGroup())
        p.setup()
        p.run_model()
        assert_near_equal(p['a'], np.array([[0, 1, 2],
                                            [3, 4, 5],
                                            [6, 7, 8]]))
        assert_near_equal(p['comp1.a'], np.array([0, 4, 8]))
        assert_near_equal(p['comp1.b'], np.array([0, 8, 16]))

    def test_promotes_bad_spec(self):
        """Passing a bare string instead of an iterable to promotes must raise."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp', om.ExecComp('b=2*a', a=np.zeros(5), b=np.zeros(5)))

            def configure(self):
                self.promotes('comp', inputs='a')

        top = om.Problem(model=SimpleGroup())
        with self.assertRaises(RuntimeError) as cm:
            top.setup()
        self.assertEqual(str(cm.exception),
                         "<model> <class SimpleGroup>: Trying to promote inputs='a', "
                         "but an iterator of strings and/or tuples is required.")

    def test_promotes_src_indices_bad_type(self):
        """A non-iterable src_indices value must raise a TypeError."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp2', om.ExecComp('b=2*a', a=np.zeros(5), b=np.zeros(5)))

            def configure(self):
                self.promotes('comp2', inputs=['a'], src_indices=1.0)

        top = om.Problem(model=SimpleGroup())
        with self.assertRaises(TypeError) as cm:
            top.setup()
        self.assertEqual(str(cm.exception),
                         "<model> <class SimpleGroup>: The src_indices argument should be an int, list, "
                         "tuple, ndarray or Iterable, but src_indices for promotes from 'comp2' are "
                         "<class 'float'>.")

    def test_promotes_src_indices_bad_dtype(self):
        """src_indices with a non-integer dtype must raise a TypeError."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp2', om.ExecComp('b=2*a', a=np.zeros(5), b=np.zeros(5)))

            def configure(self):
                # np.complex was a deprecated (now removed) alias for the
                # builtin complex; the builtin yields the same complex128 dtype
                self.promotes('comp2', inputs=['a'], src_indices=np.array([0], dtype=complex))

        top = om.Problem(model=SimpleGroup())
        with self.assertRaises(TypeError) as cm:
            top.setup()
        self.assertEqual(str(cm.exception),
                         "<model> <class SimpleGroup>: src_indices must contain integers, but src_indices "
                         "for promotes from 'comp2' are type <class 'numpy.complex128'>.")

    def test_promotes_src_indices_bad_shape(self):
        """Mismatched src_indices/input shapes must raise a ValueError."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('indep', om.IndepVarComp(), promotes=['*'])
                self.add_subsystem('comp1', om.ExecComp('b=2*a', a=np.ones(5), b=np.ones(5)))

            def configure(self):
                self.indep.add_output('a1', np.ones(3))
                # 3 indices for a length-5 input
                self.promotes('comp1', inputs=['a'], src_indices=[0, 1, 2])

        p = om.Problem(model=SimpleGroup())
        with self.assertRaises(ValueError) as cm:
            p.setup()
        self.assertEqual(str(cm.exception),
                         "Shape of indices does not match shape for 'a': Expected (5,) but got (3,).")

    def test_promotes_src_indices_different(self):
        """Each promoted input may carry its own src_indices."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('indep', om.IndepVarComp(), promotes=['*'])
                self.add_subsystem('comp1', om.ExecComp('z=x+y', x=np.ones(3), y=np.ones(3), z=np.ones(3)))

            def configure(self):
                self.indep.add_output('x', 2*np.array(range(5)))
                self.indep.add_output('y', 3*np.array(range(5)))
                self.promotes('comp1', inputs=['x'], src_indices=[0, 2, 4])
                self.promotes('comp1', inputs=['y'], src_indices=[1, 2, 3])

        p = om.Problem(model=SimpleGroup())
        p.setup()
        p.run_model()
        assert_near_equal(p['indep.x'], np.array([0, 2, 4, 6, 8]))
        assert_near_equal(p['indep.y'], np.array([0, 3, 6, 9, 12]))
        assert_near_equal(p['comp1.x'], np.array([0, 4, 8]))
        assert_near_equal(p['comp1.y'], np.array([3, 6, 9]))
        assert_near_equal(p['comp1.z'], np.array([3, 10, 17]))

    def test_promotes_src_indices_mixed(self):
        """src_indices from add_input and from promotes may coexist."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('indep', om.IndepVarComp(), promotes=['*'])
                self.add_subsystem('comp1', om.ExecComp('z=x+y',
                                                        x=np.ones(3),
                                                        y={'value': np.ones(3),
                                                           'src_indices': [1, 2, 3]},
                                                        z=np.ones(3)),
                                   promotes_inputs=['y'])

            def configure(self):
                self.indep.add_output('x', 2*np.array(range(5)))
                self.indep.add_output('y', 3*np.array(range(5)))
                self.promotes('comp1', inputs=['x'], src_indices=[0, 2, 4])

        p = om.Problem(model=SimpleGroup())
        p.setup()
        p.run_model()
        assert_near_equal(p['indep.x'], np.array([0, 2, 4, 6, 8]))
        assert_near_equal(p['indep.y'], np.array([0, 3, 6, 9, 12]))
        assert_near_equal(p['comp1.x'], np.array([0, 4, 8]))
        assert_near_equal(p['comp1.y'], np.array([3, 6, 9]))
        assert_near_equal(p['comp1.z'], np.array([3, 10, 17]))

    def test_promotes_src_indices_mixed_array(self):
        """Same as the mixed case, but with src_indices given as an ndarray."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('indep', om.IndepVarComp(), promotes=['*'])
                self.add_subsystem('comp1', om.ExecComp('z=x+y',
                                                        x=np.ones(3),
                                                        y={'value': np.ones(3),
                                                           'src_indices': [1, 2, 3]},
                                                        z=np.ones(3)),
                                   promotes_inputs=['y'])

            def configure(self):
                self.indep.add_output('x', 2*np.array(range(5)))
                self.indep.add_output('y', 3*np.array(range(5)))
                self.promotes('comp1', inputs=['x'],
                              src_indices=np.array([0, 2, 4]))

        p = om.Problem(model=SimpleGroup())
        p.setup()
        p.run_model()
        assert_near_equal(p['indep.x'], np.array([0, 2, 4, 6, 8]))
        assert_near_equal(p['indep.y'], np.array([0, 3, 6, 9, 12]))
        assert_near_equal(p['comp1.x'], np.array([0, 4, 8]))
        assert_near_equal(p['comp1.y'], np.array([3, 6, 9]))
        assert_near_equal(p['comp1.z'], np.array([3, 10, 17]))

    def test_promotes_src_indices_wildcard(self):
        """One src_indices list applied to every input matched by '*'."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('indep', om.IndepVarComp(), promotes=['*'])
                self.add_subsystem('comp1', om.ExecComp('z=x+y', x=np.ones(3), y=np.ones(3), z=np.ones(3)))

            def configure(self):
                self.indep.add_output('x', 2*np.array(range(5)))
                self.indep.add_output('y', 3*np.array(range(5)))
                self.promotes('comp1', inputs=['*'], src_indices=[0, 2, 4])

        p = om.Problem(model=SimpleGroup())
        p.setup()
        p.run_model()
        assert_near_equal(p['indep.x'], np.array([0, 2, 4, 6, 8]))
        assert_near_equal(p['indep.y'], np.array([0, 3, 6, 9, 12]))
        assert_near_equal(p['comp1.x'], np.array([0, 4, 8]))
        assert_near_equal(p['comp1.y'], np.array([0, 6, 12]))
        assert_near_equal(p['comp1.z'], np.array([0, 10, 20]))

    def test_promotes_src_indices_wildcard_any(self):
        """src_indices with any=['*'] warns that they only apply to inputs."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('indep', om.IndepVarComp(), promotes=['*'])
                self.add_subsystem('comp1', om.ExecComp('z=x+y', x=np.ones(3), y=np.ones(3), z=np.ones(3)))

            def configure(self):
                self.indep.add_output('x', 2*np.array(range(5)))
                self.indep.add_output('y', 3*np.array(range(5)))
                self.promotes('comp1', any=['*'], src_indices=[0, 2, 4])

        p = om.Problem(model=SimpleGroup())
        with assert_warning(UserWarning,
                            "<model> <class SimpleGroup>: src_indices have been specified with promotes 'any'. "
                            "Note that src_indices only apply to matching inputs."):
            p.setup()
        p.run_model()
        assert_near_equal(p['indep.x'], np.array([0, 2, 4, 6, 8]))
        assert_near_equal(p['indep.y'], np.array([0, 3, 6, 9, 12]))
        assert_near_equal(p['comp1.x'], np.array([0, 4, 8]))
        assert_near_equal(p['comp1.y'], np.array([0, 6, 12]))
        assert_near_equal(p['comp1.z'], np.array([0, 10, 20]))

    def test_promotes_src_indices_wildcard_output(self):
        """src_indices on promoted outputs is meaningless and must raise."""

        class SimpleGroup(om.Group):

            def setup(self):
                self.add_subsystem('indep', om.IndepVarComp(), promotes=['*'])
                self.add_subsystem('comp1', om.ExecComp('z=x+y', x=np.ones(3), y=np.ones(3), z=np.ones(3)))

            def configure(self):
                self.indep.add_output('x', 2*np.array(range(5)))
                self.indep.add_output('y', 3*np.array(range(5)))
                self.promotes('comp1', outputs=['*'], src_indices=[0, 2, 4])

        p = om.Problem(model=SimpleGroup())
        with self.assertRaises(RuntimeError) as cm:
            p.setup()
        self.assertEqual(str(cm.exception),
                         "<model> <class SimpleGroup>: Trying to promote outputs ['*'] "
                         "while specifying src_indices [0, 2, 4] is not meaningful.")

    def test_promotes_src_indices_collision(self):
        """Conflicting src_indices at different levels must raise."""

        class SubGroup(om.Group):

            def setup(self):
                self.add_subsystem('comp', om.ExecComp('x=2.0*a+3.0*b',
                                                       a=np.ones(3),
                                                       b=np.ones(3),
                                                       x=np.ones(3)))

            def configure(self):
                self.promotes('comp', inputs=['a'], src_indices=[0, 2, 4])

        class TopGroup(om.Group):

            def setup(self):
                self.add_subsystem('ind', om.IndepVarComp())
                self.add_subsystem('sub', SubGroup())

            def configure(self):
                self.ind.add_output('a', val=np.ones(5))
                self.promotes('ind', outputs=['a'])
                # conflicts with the src_indices already set inside SubGroup
                self.promotes('sub', inputs=['a'], src_indices=[0, 1, 2])

        p = om.Problem(model=TopGroup())
        with self.assertRaises(RuntimeError) as cm:
            p.setup()
        self.assertEqual(str(cm.exception),
                         "'sub' <class SubGroup>: Trying to promote input 'a' with src_indices [0 1 2], "
                         "but src_indices have already been specified as [0 2 4].")
class MyComp(om.ExplicitComponent):
    """Component with one input 'x' and one output 'y' where y = 2 * x.

    The input's shape, src_indices, and flat_src_indices are fixed at
    construction time so tests can exercise different connection setups.
    """

    def __init__(self, input_shape, src_indices=None, flat_src_indices=False):
        super().__init__()
        self._input_shape = input_shape
        self._src_indices = src_indices
        self._flat_src_indices = flat_src_indices

    def setup(self):
        shape = self._input_shape
        self.add_input('x', val=np.zeros(shape),
                       src_indices=self._src_indices,
                       flat_src_indices=self._flat_src_indices)
        self.add_output('y', val=np.zeros(shape))

    def compute(self, inputs, outputs):
        # simple doubling so connection/index errors show up as value errors
        outputs['y'] = inputs['x'] * 2.0
class TestConnect(unittest.TestCase):
    """Tests of Group.connect() error checking: src_indices validation, bad
    source/target names, direction errors, unit compatibility, and shapes."""

    def setUp(self):
        """Build a small model: sub.src (array + scalar outputs) feeding tgt/cmp/arr."""
        prob = om.Problem(om.Group())

        sub = prob.model.add_subsystem('sub', om.Group())

        idv = sub.add_subsystem('src', om.IndepVarComp())
        idv.add_output('x', np.arange(15).reshape((5, 3)))  # array
        idv.add_output('s', 3.)                             # scalar

        sub.add_subsystem('tgt', om.ExecComp('y = x'))
        sub.add_subsystem('cmp', om.ExecComp('z = x'))
        sub.add_subsystem('arr', om.ExecComp('a = x', x=np.zeros(2)))

        self.sub = sub
        self.prob = prob

    def test_src_indices_as_int_list(self):
        """A plain python list of ints is a valid src_indices arg."""
        self.sub.connect('src.x', 'tgt.x', src_indices=[1])

    def test_src_indices_as_int_array(self):
        """An integer ndarray is a valid src_indices arg."""
        self.sub.connect('src.x', 'tgt.x', src_indices=np.zeros(1, dtype=int))

    def test_src_indices_as_float_list(self):
        """A list of floats must be rejected with a TypeError."""
        msg = "src_indices must contain integers, but src_indices for " + \
              "connection from 'src.x' to 'tgt.x' is <.* 'numpy.float64'>."

        with self.assertRaisesRegex(TypeError, msg):
            self.sub.connect('src.x', 'tgt.x', src_indices=[1.0])

    def test_src_indices_as_float_array(self):
        """A float ndarray must be rejected with a TypeError."""
        msg = "src_indices must contain integers, but src_indices for " + \
              "connection from 'src.x' to 'tgt.x' is <.* 'numpy.float64'>."

        with self.assertRaisesRegex(TypeError, msg):
            self.sub.connect('src.x', 'tgt.x', src_indices=np.zeros(1))

    def test_src_indices_as_str(self):
        """Passing a second target where src_indices goes should hint at list syntax."""
        msg = "src_indices must be an index array, " + \
              "did you mean connect('src.x', [tgt.x, cmp.x])?"

        with self.assertRaisesRegex(TypeError, msg):
            self.sub.connect('src.x', 'tgt.x', 'cmp.x')

    def test_already_connected(self):
        """Connecting a second source to an already-connected input must raise."""
        msg = "Input 'tgt.x' is already connected to 'src.x'."

        self.sub.connect('src.x', 'tgt.x', src_indices=[1])
        with self.assertRaisesRegex(RuntimeError, msg):
            self.sub.connect('cmp.x', 'tgt.x', src_indices=[1])

    def test_invalid_source(self):
        """A nonexistent source is detected at setup: error, or warning when downgraded."""
        msg = "'sub' <class Group>: Attempted to connect from 'src.z' to 'tgt.x', but 'src.z' doesn't exist."

        # source and target names can't be checked until setup
        # because setup is not called until then
        self.sub.connect('src.z', 'tgt.x', src_indices=[1])
        with self.assertRaises(NameError) as context:
            self.prob.setup()
        self.assertEqual(str(context.exception), msg)

        # downgrading connection errors turns the same message into a warning
        self.prob.model._raise_connection_errors = False
        with assert_warning(UserWarning, msg):
            self.prob.setup()

    def test_connect_to_output(self):
        """Connecting output -> output is invalid and is detected at setup."""
        msg = "'sub' <class Group>: Attempted to connect from 'tgt.y' to 'cmp.z', " + \
              "but 'cmp.z' is an output. " + \
              "All connections must be from an output to an input."

        # source and target names can't be checked until setup
        # because setup is not called until then
        self.sub.connect('tgt.y', 'cmp.z')
        with self.assertRaises(NameError) as context:
            self.prob.setup()
        self.assertEqual(str(context.exception), msg)

        self.prob.model._raise_connection_errors = False
        with assert_warning(UserWarning, msg):
            self.prob.setup()

    def test_connect_from_input(self):
        """Connecting input -> input is invalid and is detected at setup."""
        msg = "'sub' <class Group>: Attempted to connect from 'tgt.x' to 'cmp.x', " + \
              "but 'tgt.x' is an input. " + \
              "All connections must be from an output to an input."

        # source and target names can't be checked until setup
        # because setup is not called until then
        self.sub.connect('tgt.x', 'cmp.x')
        with self.assertRaises(NameError) as context:
            self.prob.setup()
        self.assertEqual(str(context.exception), msg)

        self.prob.model._raise_connection_errors = False
        with assert_warning(UserWarning, msg):
            self.prob.setup()

    def test_explicit_conn_to_prom_inputs(self):
        """An explicit connection to a promoted name should resolve the ambiguity."""
        p = om.Problem()
        p.model.add_subsystem('indeps', om.IndepVarComp('foo', val=10., units='ft'))
        p.model.add_subsystem('C1', om.ExecComp('y=3*x', x={'units': 'ft', 'value': 1.}), promotes_inputs=['x'])
        p.model.add_subsystem('C2', om.ExecComp('y=4*x', x={'units': 'ft', 'value': 1.}), promotes_inputs=['x'])
        p.model.connect('indeps.foo', 'x')
        p.setup()
        p.final_setup()
        # before bug fix, the following generated an ambiguity error
        p['x']

    def test_invalid_target(self):
        """A nonexistent target is detected at setup: error, or warning when downgraded."""
        msg = "'sub' <class Group>: Attempted to connect from 'src.x' to 'tgt.z', but 'tgt.z' doesn't exist."

        # source and target names can't be checked until setup
        # because setup is not called until then
        self.sub.connect('src.x', 'tgt.z', src_indices=[1])
        with self.assertRaises(NameError) as context:
            self.prob.setup()
        self.assertEqual(str(context.exception), msg)

        self.prob.model._raise_connection_errors = False
        with assert_warning(UserWarning, msg):
            self.prob.setup()

    def test_connect_within_system(self):
        """A component cannot be connected to itself."""
        msg = "Output and input are in the same System for connection " + \
              "from 'tgt.y' to 'tgt.x'."

        with self.assertRaisesRegex(RuntimeError, msg):
            self.sub.connect('tgt.y', 'tgt.x', src_indices=[1])

    def test_connect_within_system_with_promotes(self):
        """Self-connection through a promoted name is also rejected at setup."""
        prob = om.Problem()

        sub = prob.model.add_subsystem('sub', om.Group())
        sub.add_subsystem('tgt', om.ExecComp('y = x'), promotes_outputs=['y'])
        sub.connect('y', 'tgt.x', src_indices=[1])

        msg = "'sub' <class Group>: Output and input are in the same System for " + \
              "connection from 'y' to 'tgt.x'."

        with self.assertRaises(RuntimeError) as ctx:
            prob.setup()
        self.assertEqual(str(ctx.exception), msg)

        prob.model._raise_connection_errors = False
        with assert_warning(UserWarning, msg):
            prob.setup()

    def test_connect_units_with_unitless(self):
        """Connecting a unit-ed output to a unitless input warns at setup."""
        prob = om.Problem()
        prob.model.add_subsystem('px1', om.IndepVarComp('x1', 100.0))
        prob.model.add_subsystem('src', om.ExecComp('x2 = 2 * x1', x2={'units': 'degC'}))
        prob.model.add_subsystem('tgt', om.ExecComp('y = 3 * x', x={'units': None}))
        prob.model.connect('px1.x1', 'src.x1')
        prob.model.connect('src.x2', 'tgt.x')

        msg = "<model> <class Group>: Output 'src.x2' with units of 'degC' is connected " \
              "to input 'tgt.x' which has no units."

        with assert_warning(UserWarning, msg):
            prob.setup()

        # bug fix: was 'self.prob.model', which mutated the setUp fixture's
        # problem rather than the local one under test (a no-op for this test)
        prob.model._raise_connection_errors = False
        with assert_warning(UserWarning, msg):
            prob.setup()

    def test_connect_incompatible_units(self):
        """Units that cannot be converted (degC vs m) must raise at setup."""
        msg = "<model> <class Group>: Output units of 'degC' for 'src.x2' are incompatible " + \
              "with input units of 'm' for 'tgt.x'."

        prob = om.Problem()
        prob.model.add_subsystem('px1', om.IndepVarComp('x1', 100.0))
        prob.model.add_subsystem('src', om.ExecComp('x2 = 2 * x1', x2={'units': 'degC'}))
        prob.model.add_subsystem('tgt', om.ExecComp('y = 3 * x', x={'units': 'm'}))
        prob.model.connect('px1.x1', 'src.x1')
        prob.model.connect('src.x2', 'tgt.x')

        with self.assertRaises(RuntimeError) as context:
            prob.setup()
        self.assertEqual(str(context.exception), msg)

        prob.model._raise_connection_errors = False
        with assert_warning(UserWarning, msg):
            prob.setup()

    def test_connect_units_with_nounits(self):
        """Connecting a unitless output to a unit-ed input warns, then runs unconverted."""
        prob = om.Problem()
        prob.model.add_subsystem('px1', om.IndepVarComp('x1', 100.0))
        prob.model.add_subsystem('src', om.ExecComp('x2 = 2 * x1'))
        prob.model.add_subsystem('tgt', om.ExecComp('y = 3 * x', x={'units': 'degC'}))
        prob.model.connect('px1.x1', 'src.x1')
        prob.model.connect('src.x2', 'tgt.x')

        prob.set_solver_print(level=0)

        msg = "<model> <class Group>: Input 'tgt.x' with units of 'degC' is " \
              "connected to output 'src.x2' which has no units."

        with assert_warning(UserWarning, msg):
            prob.setup()

        prob.run_model()

        # no unit conversion applied: 100 * 2 * 3
        assert_near_equal(prob['tgt.y'], 600.)

    def test_connect_units_with_nounits_prom(self):
        """Same unitless-output warning for a connection made by promotion."""
        prob = om.Problem()
        prob.model.add_subsystem('px1', om.IndepVarComp('x', 100.0), promotes_outputs=['x'])
        prob.model.add_subsystem('src', om.ExecComp('y = 2 * x'), promotes=['x', 'y'])
        prob.model.add_subsystem('tgt', om.ExecComp('z = 3 * y', y={'units': 'degC'}), promotes=['y'])

        prob.set_solver_print(level=0)

        msg = "<model> <class Group>: Input 'tgt.y' with units of 'degC' is " \
              "connected to output 'src.y' which has no units."

        with assert_warning(UserWarning, msg):
            prob.setup()

        prob.run_model()

        assert_near_equal(prob['tgt.z'], 600.)

    def test_mix_promotes_types(self):
        """'promotes' cannot be combined with 'promotes_outputs'."""
        prob = om.Problem()
        prob.model.add_subsystem('src', om.ExecComp(['y = 2 * x', 'y2 = 3 * x']),
                                 promotes=['x', 'y'], promotes_outputs=['y2'])

        with self.assertRaises(RuntimeError) as context:
            prob.setup()

        self.assertEqual(str(context.exception),
                         "'src' <class ExecComp>: 'promotes' cannot be used at the same time as "
                         "'promotes_inputs' or 'promotes_outputs'.")

    def test_mix_promotes_types2(self):
        """'promotes' cannot be combined with 'promotes_inputs'."""
        prob = om.Problem()
        prob.model.add_subsystem('src', om.ExecComp(['y = 2 * x', 'y2 = 3 * x2']),
                                 promotes=['x', 'y'], promotes_inputs=['x2'])

        with self.assertRaises(RuntimeError) as context:
            prob.setup()

        self.assertEqual(str(context.exception),
                         "'src' <class ExecComp>: 'promotes' cannot be used at the same time as "
                         "'promotes_inputs' or 'promotes_outputs'.")

    def test_nested_nested_conn(self):
        """Connections between components nested two groups deep run correctly."""
        prob = om.Problem()
        root = prob.model

        root.add_subsystem('p', om.IndepVarComp('x', 1.0))

        G1 = root.add_subsystem('G1', om.Group())
        par1 = G1.add_subsystem('par1', om.Group())

        par1.add_subsystem('c2', om.ExecComp('y = x * 2.0'))
        par1.add_subsystem('c4', om.ExecComp('y = x * 4.0'))

        prob.model.add_design_var('p.x')
        prob.model.add_constraint('G1.par1.c4.y', upper=0.0)

        root.connect('p.x', 'G1.par1.c2.x')
        root.connect('G1.par1.c2.y', 'G1.par1.c4.x')

        prob.setup()
        prob.run_driver()

        # 1.0 * 2.0 * 4.0
        assert_near_equal(prob['G1.par1.c4.y'], 8.0)

    def test_bad_shapes(self):
        """A scalar-to-array connection must fail the shape check at setup."""
        self.sub.connect('src.s', 'arr.x')

        msg = "'sub' <class Group>: The source and target shapes do not match or are ambiguous " + \
              "for the connection 'sub.src.s' to 'sub.arr.x'. The source shape is (1,) " + \
              "but the target shape is (2,)."

        with self.assertRaises(ValueError) as context:
            self.prob.setup()
        self.assertEqual(str(context.exception), msg)

        self.prob.model._raise_connection_errors = False
        with assert_warning(UserWarning, msg):
            self.prob.setup()

    def test_bad_indices_shape(self):
        """src_indices whose shape doesn't match the target shape must raise."""
        p = om.Problem()
        p.model.add_subsystem('IV', om.IndepVarComp('x', np.arange(12).reshape((4, 3))))
        p.model.add_subsystem('C1', om.ExecComp('y=sum(x)*2.0', x=np.zeros((2, 2))))

        p.model.connect('IV.x', 'C1.x', src_indices=[(1, 1)])

        msg = "<model> <class Group>: The source indices [[1 1]] do not specify a valid shape " + \
              "for the connection 'IV.x' to 'C1.x'. The target shape is (2, 2) but " + \
              "indices are (1, 2)."

        with self.assertRaises(ValueError) as context:
            p.setup()
        self.assertEqual(str(context.exception), msg)

        p.model._raise_connection_errors = False
        with assert_warning(UserWarning, msg):
            p.setup()

    def test_bad_indices_dimensions(self):
        """src_indices with more dimensions than the source must raise."""
        self.sub.connect('src.x', 'arr.x', src_indices=[(2, -1, 2), (2, 2, 2)],
                         flat_src_indices=False)

        msg = ("'sub' <class Group>: The source indices [[ 2 -1  2] [ 2  2  2]] do not specify a "
               "valid shape for the connection 'sub.src.x' to 'sub.arr.x'. "
               "The source has 2 dimensions but the indices expect 3.")

        try:
            self.prob.setup()
        except ValueError as err:
            self.assertEqual(str(err), msg)
        else:
            self.fail('Exception expected.')

        self.prob.model._raise_connection_errors = False
        with assert_warning(UserWarning, msg):
            self.prob.setup()

    def test_bad_indices_index(self):
        # the index value within src_indices is outside the valid range for the source
        self.sub.connect('src.x', 'arr.x', src_indices=[(2, -1), (4, 4)],
                         flat_src_indices=False)

        msg = ("'sub' <class Group>: The source indices do not specify a valid index for the "
               "connection 'sub.src.x' to 'sub.arr.x'. Index '4' "
               "is out of range for source dimension of size 3.")

        try:
            self.prob.setup()
        except ValueError as err:
            self.assertEqual(str(err), msg)
        else:
            self.fail('Exception expected.')

        self.prob.model._raise_connection_errors = False
        with assert_warning(UserWarning, msg):
            self.prob.setup()
class TestSrcIndices(unittest.TestCase):
    """Tests of src_indices bounds checking for explicit and promoted connections."""

    def create_problem(self, src_shape, tgt_shape, src_indices=None, flat_src_indices=False,
                       promotes=None, raise_connection_errors=True):
        """Build and set up a two-component model connected via src_indices.

        When `promotes` is given, the connection is made by promotion and the
        src_indices are passed to the target component's add_input; otherwise an
        explicit connect() carries them. Returns the Problem so callers can
        inspect it after setup (previously the built Problem was discarded).
        """
        prob = om.Problem()
        prob.model.add_subsystem('indeps', om.IndepVarComp('x', shape=src_shape),
                                 promotes=promotes)
        prob.model.add_subsystem('C1', MyComp(tgt_shape,
                                              src_indices=src_indices if promotes else None,
                                              flat_src_indices=flat_src_indices),
                                 promotes=promotes)
        if promotes is None:
            prob.model.connect('indeps.x', 'C1.x', src_indices=src_indices,
                               flat_src_indices=flat_src_indices)
        if not raise_connection_errors:
            prob.model._raise_connection_errors = False
        prob.setup()
        return prob

    def test_src_indices_shape(self):
        """In-range flat indices mapping (3,3) onto (2,2) set up cleanly."""
        self.create_problem(src_shape=(3, 3), tgt_shape=(2, 2),
                            src_indices=[[4, 5], [7, 8]],
                            flat_src_indices=True)

    def test_src_indices_shape_bad_idx_flat(self):
        """A flat index past the end of the source errors (or warns when downgraded)."""
        msg = "<model> <class Group>: The source indices do not specify a valid index " + \
              "for the connection 'indeps.x' to 'C1.x'. " + \
              "Index '9' is out of range for source dimension of size 9."
        try:
            self.create_problem(src_shape=(3, 3), tgt_shape=(2, 2),
                                src_indices=[[4, 5], [7, 9]],
                                flat_src_indices=True)
        except Exception as err:
            self.assertEqual(str(err), msg)
        else:
            self.fail("Exception expected.")

        with assert_warning(UserWarning, msg):
            self.create_problem(src_shape=(3, 3), tgt_shape=(2, 2),
                                src_indices=[[4, 5], [7, 9]],
                                flat_src_indices=True,
                                raise_connection_errors=False)

    def test_src_indices_shape_bad_idx_flat_promotes(self):
        """Same out-of-range check when the connection is made by promotion."""
        msg = "<model> <class Group>: The source indices do not specify a valid index " + \
              "for the connection 'indeps.x' to 'C1.x'. " + \
              "Index '9' is out of range for source dimension of size 9."
        try:
            self.create_problem(src_shape=(3, 3), tgt_shape=(2, 2),
                                src_indices=[[4, 5], [7, 9]],
                                flat_src_indices=True, promotes=['x'])
        except Exception as err:
            self.assertEqual(str(err), msg)
        else:
            self.fail("Exception expected.")

        # bug fix: the warning case previously omitted promotes=['x'], so it
        # re-tested the explicit-connection path instead of the promoted one
        with assert_warning(UserWarning, msg):
            self.create_problem(src_shape=(3, 3), tgt_shape=(2, 2),
                                src_indices=[[4, 5], [7, 9]],
                                flat_src_indices=True, promotes=['x'],
                                raise_connection_errors=False)

    def test_src_indices_shape_bad_idx_flat_neg(self):
        """A negative flat index beyond -size errors (or warns when downgraded)."""
        msg = "<model> <class Group>: The source indices do not specify a valid index " + \
              "for the connection 'indeps.x' to 'C1.x'. " + \
              "Index '-10' is out of range for source dimension of size 9."
        try:
            self.create_problem(src_shape=(3, 3), tgt_shape=(2, 2),
                                src_indices=[[-10, 5], [7, 8]],
                                flat_src_indices=True)
        except Exception as err:
            self.assertEqual(str(err), msg)
        else:
            self.fail("Exception expected.")

        with assert_warning(UserWarning, msg):
            self.create_problem(src_shape=(3, 3), tgt_shape=(2, 2),
                                src_indices=[[-10, 5], [7, 8]],
                                flat_src_indices=True,
                                raise_connection_errors=False)
class TestGroupAddInput(unittest.TestCase):
    """Tests of Group.set_input_defaults: conflicting units/values among
    connected promoted inputs and how conflicts resolve across tree levels."""

    def _make_tree_model(self, diff_units=False, diff_vals=False):
        """Build model -> (G1 -> G2(C1,C2), G3(C3,C4)) and (par -> G4(C5,C6), G5(C7,C8)),
        all promoting 'x'. Optionally vary units and/or values among the inputs."""
        p = om.Problem()
        model = p.model

        if diff_units:
            units1 = 'ft'
            units2 = 'inch'
        else:
            units1 = units2 = 'ft'

        val = 1.0

        g1 = model.add_subsystem("G1", om.Group(), promotes_inputs=['x'])

        g2 = g1.add_subsystem("G2", om.Group(), promotes_inputs=['x'])
        g2.add_subsystem("C1", om.ExecComp("y = 2. * x",
                                           x={'value': val, 'units': units2},
                                           y={'value': 1.0, 'units': units2}),
                         promotes_inputs=['x'])
        g2.add_subsystem("C2", om.ExecComp("y = 3. * x",
                                           x={'value': val, 'units': units1},
                                           y={'value': 1.0, 'units': units1}),
                         promotes_inputs=['x'])

        g3 = g1.add_subsystem("G3", om.Group(), promotes_inputs=['x'])
        if diff_vals: val = 2.0
        g3.add_subsystem("C3", om.ExecComp("y = 4. * x",
                                           x={'value': val, 'units': units1},
                                           y={'value': 1.0, 'units': units1}),
                         promotes_inputs=['x'])
        g3.add_subsystem("C4", om.ExecComp("y = 5. * x",
                                           x={'value': val, 'units': units2},
                                           y={'value': 1.0, 'units': units2}),
                         promotes_inputs=['x'])

        par = model.add_subsystem("par", om.ParallelGroup(), promotes_inputs=['x'])

        g4 = par.add_subsystem("G4", om.Group(), promotes_inputs=['x'])
        if diff_vals: val = 3.0
        g4.add_subsystem("C5", om.ExecComp("y = 6. * x",
                                           x={'value': val, 'units': units2},
                                           y={'value': 1.0, 'units': units2}),
                         promotes_inputs=['x'])
        g4.add_subsystem("C6", om.ExecComp("y = 7. * x",
                                           x={'value': val, 'units': units1},
                                           y={'value': 1.0, 'units': units1}),
                         promotes_inputs=['x'])

        g5 = par.add_subsystem("G5", om.Group(), promotes_inputs=['x'])
        if diff_vals: val = 4.0
        g5.add_subsystem("C7", om.ExecComp("y = 8. * x",
                                           x={'value': val, 'units': units1},
                                           y={'value': 1.0, 'units': units1}),
                         promotes_inputs=['x'])
        g5.add_subsystem("C8", om.ExecComp("y = 9. * x",
                                           x={'value': val, 'units': units2},
                                           y={'value': 1.0, 'units': units2}),
                         promotes_inputs=['x'])

        return p

    def test_missing_diff_units(self):
        """Promoted inputs with different units and no set_input_defaults must raise."""
        p = om.Problem()
        model = p.model

        par = model.add_subsystem('par', om.ParallelGroup(), promotes_inputs=['x'])
        par.add_subsystem('C1', om.ExecComp('y = 3. * x',
                                            x={'value': 1.0, 'units': 'ft'},
                                            y={'value': 1.0, 'units': 'ft'}),
                          promotes_inputs=['x'])
        par.add_subsystem('C2', om.ExecComp('y = 5. * x',
                                            x={'value': 1.0, 'units': 'inch'},
                                            y={'value': 1.0, 'units': 'inch'}),
                          promotes_inputs=['x'])

        with self.assertRaises(Exception) as cm:
            p.setup()

        self.assertEqual(cm.exception.args[0],
                         "<model> <class Group>: The following inputs, ['par.C1.x', 'par.C2.x'], promoted to 'x', are connected but their metadata entries ['units', 'value'] differ. Call <group>.set_input_defaults('x', units=?, val=?), where <group> is the Group named 'par' to remove the ambiguity.")

    def test_missing_diff_vals(self):
        """Promoted inputs with different values and no set_input_defaults must raise."""
        p = om.Problem()
        model = p.model

        par = model.add_subsystem('par', om.ParallelGroup(), promotes_inputs=['x'])
        par.add_subsystem('C1', om.ExecComp('y = 3. * x', x=1.0), promotes_inputs=['x'])
        par.add_subsystem('C2', om.ExecComp('y = 5. * x', x=1.1), promotes_inputs=['x'])

        with self.assertRaises(Exception) as cm:
            p.setup()

        self.assertEqual(cm.exception.args[0],
                         "<model> <class Group>: The following inputs, ['par.C1.x', 'par.C2.x'], promoted to 'x', are connected but their metadata entries ['value'] differ. Call <group>.set_input_defaults('x', val=?), where <group> is the Group named 'par' to remove the ambiguity.")

    def test_conflicting_units(self):
        # multiple Group.set_input_defaults calls at same tree level with conflicting units args
        p = self._make_tree_model()
        model = p.model

        g2 = model._get_subsystem('G1.G2')
        g2.set_input_defaults('x', units='ft')

        g3 = model._get_subsystem('G1.G3')
        g3.set_input_defaults('x', units='ft')

        g4 = model._get_subsystem('par.G4')
        g4.set_input_defaults('x', units='inch')

        g5 = model._get_subsystem('par.G5')
        g5.set_input_defaults('x', units='ft')

        with self.assertRaises(Exception) as cm:
            p.setup()

        self.assertEqual(cm.exception.args[0], "<model> <class Group>: The subsystems G1.G2 and par.G4 called set_input_defaults for promoted input 'x' with conflicting values for 'units'. Call <group>.set_input_defaults('x', units=?), where <group> is the model to remove the ambiguity.")

    def test_conflicting_units_multi_level(self):
        # multiple Group.set_input_defaults calls at different tree levels with conflicting units args
        p = self._make_tree_model(diff_units=True)
        model = p.model

        g2 = model._get_subsystem('G1.G2')
        g2.set_input_defaults('x', units='km')

        g3 = model._get_subsystem('G1.G3')
        g3.set_input_defaults('x', units='ft')

        g4 = model._get_subsystem('par.G4')
        g4.set_input_defaults('x', units='ft')

        g5 = model._get_subsystem('par.G5')
        g5.set_input_defaults('x', units='ft')

        # G1's setting overrides G2's but still conflicts with par.G4's
        g1 = model._get_subsystem('G1')
        g1.set_input_defaults('x', units='inch')

        with self.assertRaises(Exception) as cm:
            p.setup()

        self.assertEqual(cm.exception.args[0], "<model> <class Group>: The subsystems G1 and par.G4 called set_input_defaults for promoted input 'x' with conflicting values for 'units'. Call <group>.set_input_defaults('x', units=?), where <group> is the model to remove the ambiguity.")

    def test_override_units(self):
        # multiple Group.set_input_defaults calls at different tree levels with conflicting units args
        p = self._make_tree_model()
        model = p.model

        g2 = model._get_subsystem('G1.G2')
        g2.set_input_defaults('x', units='km')

        g1 = model._get_subsystem('G1')
        g1.set_input_defaults('x', units='inch', val=2.)

        # the higher-level group wins; a warning (not an error) is expected
        msg = "Groups 'G1' and 'G1.G2' called set_input_defaults for the input 'x' with conflicting 'units'. The value (inch) from 'G1' will be used."
        testlogger = TestLogger()
        p.setup(check=True, logger=testlogger)
        p.final_setup()

        self.assertEqual(testlogger.get('warning')[1], msg)

    def test_sub_sub_override(self):
        """A setting two levels down is overridden by the top group, with a warning."""
        p = om.Problem()
        model = p.model

        G1 = model.add_subsystem('G1', om.Group())
        G1.set_input_defaults('x', units='mm', val=1.)

        G2 = G1.add_subsystem('G2', om.Group(), promotes=['x'])
        G3 = G2.add_subsystem('G3', om.Group(), promotes=['x'])
        G3.add_subsystem('C1', om.ExecComp('y = 3.*x', x={'units': 'm'}), promotes=['x'])
        G3.add_subsystem('C2', om.ExecComp('y = 4.*x', x={'units': 'cm'}), promotes=['x'])
        G3.set_input_defaults('x', units='cm')

        msg = "Groups 'G1' and 'G1.G2.G3' called set_input_defaults for the input 'x' with conflicting 'units'. The value (mm) from 'G1' will be used."
        testlogger = TestLogger()
        p.setup(check=True, logger=testlogger)
        p.final_setup()

        self.assertEqual(testlogger.get('warning')[1], msg)

    def test_sub_sets_parent_meta(self):
        """A units default set only in a subgroup propagates up, with a warning."""
        p = om.Problem()
        model = p.model

        G1 = model.add_subsystem('G1', om.Group())
        G1.set_input_defaults('x', val=2.)

        G2 = G1.add_subsystem('G2', om.Group(), promotes=['x'])
        G2.add_subsystem('C1', om.ExecComp('y = 3.*x', x={'units': 'm'}), promotes=['x'])
        G2.set_input_defaults('x', units='cm')

        msg = "Group 'G1' did not set a default 'units' for input 'x', so the value of (cm) from group 'G1.G2' will be used."
        testlogger = TestLogger()
        p.setup(check=True, logger=testlogger)
        p.final_setup()

        self.assertEqual(testlogger.get('warning')[1], msg)

    def test_sub_sub_override2(self):
        """Conflicts at two lower levels both resolve to the top group's units."""
        p = om.Problem()
        model = p.model

        G1 = model.add_subsystem('G1', om.Group())
        G1.set_input_defaults('x', units='mm', val=1.)

        G2 = G1.add_subsystem('G2', om.Group(), promotes=['x'])
        G2.set_input_defaults('x', units='km')

        G3 = G2.add_subsystem('G3', om.Group(), promotes=['x'])
        G3.add_subsystem('C1', om.ExecComp('y = 3.*x', x={'units': 'm'}), promotes=['x'])
        G3.add_subsystem('C2', om.ExecComp('y = 4.*x', x={'units': 'cm'}), promotes=['x'])
        G3.set_input_defaults('x', units='cm')

        testlogger = TestLogger()
        p.setup(check=True, logger=testlogger)

        # one warning for each overridden lower-level group
        msgs = [
            "Groups 'G1' and 'G1.G2' called set_input_defaults for the input 'x' with conflicting 'units'. The value (mm) from 'G1' will be used.",
            "Groups 'G1' and 'G1.G2.G3' called set_input_defaults for the input 'x' with conflicting 'units'. The value (mm) from 'G1' will be used."
        ]
        p.final_setup()

        self.assertEqual(testlogger.get('warning')[1], msgs[0])
        self.assertEqual(testlogger.get('warning')[2], msgs[1])

    def test_conflicting_units_multi_level_par(self):
        # multiple Group.set_input_defaults calls at different tree levels with conflicting units args
        p = self._make_tree_model(diff_units=True)
        model = p.model

        g2 = model._get_subsystem('G1.G2')
        g2.set_input_defaults('x', units='ft')

        g3 = model._get_subsystem('G1.G3')
        g3.set_input_defaults('x', units='ft')

        g4 = model._get_subsystem('par.G4')
        g4.set_input_defaults('x', units='ft')

        g5 = model._get_subsystem('par.G5')
        g5.set_input_defaults('x', units='ft')

        # 'par' overrides its children and conflicts with G1's side of the tree
        par = model._get_subsystem('par')
        par.set_input_defaults('x', units='inch')

        with self.assertRaises(Exception) as cm:
            p.setup()

        self.assertEqual(cm.exception.args[0], "<model> <class Group>: The subsystems G1.G2 and par called set_input_defaults for promoted input 'x' with conflicting values for 'units'. Call <group>.set_input_defaults('x', units=?), where <group> is the model to remove the ambiguity.")

    def test_group_input_not_found(self):
        """set_input_defaults on a name with no matching input must raise at setup."""
        p = self._make_tree_model(diff_units=True)
        model = p.model

        g2 = model._get_subsystem('G1.G2')
        g2.set_input_defaults('xx', units='ft')

        g3 = model._get_subsystem('G1.G3')
        g3.set_input_defaults('x', units='ft')

        g4 = model._get_subsystem('par.G4')
        g4.set_input_defaults('x', units='ft')

        g5 = model._get_subsystem('par.G5')
        g5.set_input_defaults('x', units='ft')

        with self.assertRaises(Exception) as cm:
            p.setup()

        self.assertEqual(cm.exception.args[0], "'G1.G2' <class Group>: The following group inputs, passed to set_input_defaults(), could not be found: ['xx'].")

    def test_conflicting_val(self):
        """Conflicting 'val' defaults from sibling subtrees must raise at setup."""
        p = self._make_tree_model(diff_vals=True)
        model = p.model

        g2 = model._get_subsystem('G1.G2')
        g2.set_input_defaults('x', val=3.0)

        g3 = model._get_subsystem('G1.G3')
        g3.set_input_defaults('x', val=3.0)

        g4 = model._get_subsystem('par.G4')
        g4.set_input_defaults('x', val=3.0)

        g5 = model._get_subsystem('par.G5')
        g5.set_input_defaults('x', val=3.0)

        # G1's value overrides its children but conflicts with par.G4's
        g1 = model._get_subsystem('G1')
        g1.set_input_defaults('x', val=4.0)

        with self.assertRaises(Exception) as cm:
            p.setup()

        self.assertEqual(cm.exception.args[0], "<model> <class Group>: The subsystems G1 and par.G4 called set_input_defaults for promoted input 'x' with conflicting values for 'value'. Call <group>.set_input_defaults('x', value=?), where <group> is the model to remove the ambiguity.")
class MultComp(om.ExplicitComponent):
    """
    This class just performs a list of simple multiplications. It also keeps track of the number
    of times _setup_var_data is called.
    """

    def __init__(self, mults=(), inits=None, **kwargs):
        """
        mults : iterable of (input_name, multiplier, output_name) triples.
        inits : optional dict of initial values keyed by variable name (default 1.).
        """
        super().__init__(**kwargs)
        self.mults = list(mults)
        self.var_setup_count = 0
        if inits is None:
            inits = {}
        self.inits = inits

    def _setup_var_data(self):
        super()._setup_var_data()
        # count invocations so tests can verify which systems were re-setup
        self.var_setup_count += 1

    def add_mult(self, inp, mult, out):
        """Register another (input, multiplier, output) triple."""
        # bug fix: the original did self.mults((inp, mult, out)), which raises
        # TypeError because self.mults is a list, not a callable
        self.mults.append((inp, mult, out))

    def setup(self):
        # a name used as both an input and an output is an error
        all_ins = {inp for inp, _, _ in self.mults}
        all_outs = {out for _, _, out in self.mults}
        common = sorted(all_ins.intersection(all_outs))
        if common:
            raise RuntimeError(f"{common} are both inputs and outputs.")

        # each output may be produced by only one multiplication
        out_list = [o for _, _, o in self.mults]
        if len(all_outs) < len(out_list):
            raise RuntimeError("Some outputs appear more than once.")

        for inp, _, out in self.mults:
            self.add_input(inp, val=self.inits.get(inp, 1.))
            self.add_output(out, val=self.inits.get(out, 1.))

    def compute(self, inputs, outputs):
        for inp, mult, out in self.mults:
            outputs[out] = mult * inputs[inp]
class ConfigGroup(om.Group):
    """
    This group can add IO vars and promotes during configure. It also keeps track of how many
    times _setup_var_data is called.
    """

    def __init__(self, parallel=False, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.cfgproms = []        # (child_path, promotes_list) applied in configure
        self.cfg_group_ins = []   # (name, val, units) for set_input_defaults in configure
        self.cfgio = {}           # child_path -> kwargs for get_io_metadata in configure
        self.cfg_invars = []      # (var_path, val, units) inputs added in configure
        self.cfg_outvars = []     # (var_path, val, units) outputs added in configure
        self.io_results = {}      # child_path -> result of get_io_metadata
        self.var_setup_count = 0
        if parallel:
            self._mpi_proc_allocator.parallel = True

    def _setup_var_data(self):
        super()._setup_var_data()
        # count invocations so tests can verify which systems were re-setup
        self.var_setup_count += 1

    def add_config_prom(self, child, prom):
        """Queue a promotes() call on `child` to be made during configure."""
        self.cfgproms.append((child, prom))

    def add_input_defaults(self, name, val=None, units=None):
        """Queue a set_input_defaults() call to be made during configure."""
        self.cfg_group_ins.append((name, val, units))

    def add_var_input(self, name, val=None, units=None):
        """Queue an add_input() on a descendant component during configure."""
        self.cfg_invars.append((name, val, units))

    def add_var_output(self, name, val=None, units=None):
        """Queue an add_output() on a descendant component during configure."""
        self.cfg_outvars.append((name, val, units))

    def add_get_io(self, child, **kwargs):
        """Queue a get_io_metadata() call on `child` during configure (one per child)."""
        if child in self.cfgio:
            raise RuntimeError(f"Can't set more than 1 call to get_io_metadata for child {child}.")
        self.cfgio[child] = kwargs

    def configure(self):
        # retrieve metadata
        for child, kwargs in self.cfgio.items():
            kid = self._get_subsystem(child)
            if kid is not None:
                self.io_results[child] = kid.get_io_metadata(**kwargs)
            else:
                # bug fix: printed the (None) subsystem object instead of its name
                print(f"'{child}' not found locally.")

        # promotes
        for child, prom in self.cfgproms:
            if '.' in child:
                parent, child = child.rsplit('.', 1)
                s = self._get_subsystem(parent)
                if s is None:
                    print(f"'{parent}' not found locally.")
                    continue
            else:
                s = self
            s.promotes(child, any=prom)

        # add inputs
        for vpath, val, units in self.cfg_invars:
            if '.' in vpath:
                parent, vname = vpath.rsplit('.', 1)
                s = self._get_subsystem(parent)
                if s is None:
                    print(f"'{parent}' not found locally.")
                    continue
                s.add_input(vname, val, units=units)
            else:
                raise RuntimeError("tried to add input var to a Group.")

        # add outputs
        for vpath, val, units in self.cfg_outvars:
            if '.' in vpath:
                parent, vname = vpath.rsplit('.', 1)
                s = self._get_subsystem(parent)
                if s is None:
                    print(f"'{parent}' not found locally.")
                    continue
                s.add_output(vname, val, units=units)
            else:
                raise RuntimeError("tried to add output var to a Group.")

        # set input defaults
        for name, val, units in self.cfg_group_ins:
            self.set_input_defaults(name, val=val, units=units)
class Test3Deep(unittest.TestCase):
    """
    This creates a system tree with two levels of subgroups below model to allow testing of various
    changes during configure that may change descendant systems that are not direct children.
    """
    # subclasses flip these to run the 'cfg' / 'sub' groups as parallel (MPI) groups
    cfg_par = False
    sub_par = False

    def build_model(self):
        """Build model -> cfg(C1, C2) -> sub(C3, C4), all ConfigGroups/MultComps."""
        p = om.Problem(model=ConfigGroup())
        minprocs = 3 if self.cfg_par else 1
        cfg = p.model.add_subsystem('cfg', ConfigGroup(parallel=self.cfg_par), min_procs=minprocs)
        cfg.add_subsystem('C1', MultComp([('x', 2., 'y')]))
        cfg.add_subsystem('C2', MultComp([('x', 3., 'y')]))

        minprocs = 2 if self.sub_par else 1
        sub = cfg.add_subsystem('sub', ConfigGroup(parallel=self.sub_par), min_procs=minprocs)
        sub.add_subsystem('C3', MultComp([('x', 4., 'y')]))
        sub.add_subsystem('C4', MultComp([('x', 5., 'y')]))
        return p

    def get_matching_var_setup_counts(self, p, count):
        """
        Return pathnames of any systems that have a var_setup_count that matches 'count'.
        """
        result = set()
        for s in p.model.system_iter(include_self=True):
            if hasattr(s, 'var_setup_count') and s.var_setup_count == count:
                result.add(s.pathname)

        if p.model.comm.size > 1:
            # under MPI, merge the per-process result sets
            newres = set()
            for res in p.model.comm.allgather(result):
                newres.update(res)
            result = newres

        return sorted(result)

    def get_io_results(self, p, parent, path):
        """
        Retrieve results of get_io_metadata calls that occurred during config.
        Results are retrieved from all procs.
        """
        s = p.model._get_subsystem(parent)
        if s is None:
            raise RuntimeError(f"No parent named {parent}.")
        res = s.io_results[path]
        if s.comm.size > 1:
            # merge per-process metadata dicts
            allres = {}
            for procres in s.comm.allgather(res):
                allres.update(procres)
            res = allres
        return res
def check_vs_meta(self, p, parent, meta_dict):
"""
Compare the given metadata dict to the internal metadata dicts of the given parent.
"""
system = p.model._get_subsystem(parent)
metas = (system._var_allprocs_abs2meta['input'], system._var_allprocs_abs2meta['output'],
system._var_abs2meta['input'], system._var_abs2meta['output'])
for vname, meta in meta_dict.items():
for key, val in meta.items():
for mymeta in metas:
if key in mymeta:
if (isinstance(val, np.ndarray) and not np.testing.assert_allclose(val, mymeta[key])) or val != mymeta[key]:
raise RuntimeError(f"{val} != {mymeta[key]}")
break
def test_io_meta(self):
p = self.build_model()
p.model.cfg.add_get_io('C1', return_rel_names=False)
p.model.cfg.add_get_io('C2')
p.model.cfg.add_get_io('sub')
p.setup()
res = self.get_io_results(p, 'cfg', 'C1')
expected = {'cfg.C1.x', 'cfg.C1.y'}
self.assertEqual({n for n in res}, expected)
self.check_vs_meta(p, 'cfg', res)
res = self.get_io_results(p, 'cfg', 'C2')
expected = {'x', 'y'}
self.assertEqual({n for n in res}, expected)
self.check_vs_meta(p, 'cfg', res)
res = self.get_io_results(p, 'cfg', 'sub')
expected = {'C3.x', 'C4.x', 'C3.y', 'C4.y'}
self.assertEqual({n for n in res}, expected)
self.check_vs_meta(p, 'cfg', res)
names = self.get_matching_var_setup_counts(p, 1)
expected = {'', 'cfg', 'cfg.C1', 'cfg.C2', 'cfg.sub', 'cfg.sub.C3', 'cfg.sub.C4'}
self.assertEqual(names, sorted(expected))
def test_io_meta_local_bad_meta_key(self):
p = self.build_model()
p.model.cfg.add_get_io('sub', metadata_keys=('value', 'foo'))
with self.assertRaises(Exception) as cm:
p.setup()
self.assertEqual(cm.exception.args[0], "'cfg.sub' <class ConfigGroup>: ['foo'] are not valid metadata entry names.")
def test_promote_descendant(self):
p = self.build_model()
p.model.cfg.add_config_prom('sub.C3', ['x'])
p.model.cfg.add_config_prom('sub.C4', ['y'])
p.setup()
names = self.get_matching_var_setup_counts(p, 1)
expected = {'', 'cfg', 'cfg.C1', 'cfg.C2', 'cfg.sub.C3', 'cfg.sub.C4'}
self.assertEqual(names, sorted(expected))
names = self.get_matching_var_setup_counts(p, 2)
expected = {'cfg.sub'}
self.assertEqual(names, sorted(expected))
def test_promote_child(self):
    """Promotions applied directly to children need no repeated var setups."""
    prob = self.build_model()
    prob.model.cfg.add_config_prom('C1', ['x'])
    prob.model.cfg.add_config_prom('C2', ['y'])
    prob.model.cfg.sub.add_config_prom('C3', ['x'])
    prob.model.cfg.sub.add_config_prom('C4', ['y'])
    prob.setup()

    setup_once = {'', 'cfg', 'cfg.C1', 'cfg.C2', 'cfg.sub', 'cfg.sub.C3', 'cfg.sub.C4'}
    self.assertEqual(self.get_matching_var_setup_counts(prob, 1), sorted(setup_once))
def test_add_input_to_child(self):
    """Adding an input to a direct child during configure re-runs that child's var setup."""
    prob = self.build_model()
    prob.model.cfg.sub.add_var_input('C3.ivar0', 3.0, units='ft')
    prob.setup()

    setup_once = {'', 'cfg', 'cfg.C1', 'cfg.C2', 'cfg.sub', 'cfg.sub.C4'}
    self.assertEqual(self.get_matching_var_setup_counts(prob, 1), sorted(setup_once))

    # C3 got the new input, so its var setup ran a second time
    setup_twice = {'cfg.sub.C3'}
    self.assertEqual(self.get_matching_var_setup_counts(prob, 2), sorted(setup_twice))
def test_add_output_to_child(self):
    """Adding an output to a direct child during configure re-runs that child's var setup."""
    prob = self.build_model()
    prob.model.cfg.sub.add_var_output('C3.ovar0', 3.0, units='ft')
    prob.setup()

    setup_once = {'', 'cfg', 'cfg.C1', 'cfg.C2', 'cfg.sub', 'cfg.sub.C4'}
    self.assertEqual(self.get_matching_var_setup_counts(prob, 1), sorted(setup_once))

    # C3 got the new output, so its var setup ran a second time
    setup_twice = {'cfg.sub.C3'}
    self.assertEqual(self.get_matching_var_setup_counts(prob, 2), sorted(setup_twice))
def test_add_input_to_descendant(self):
    """Adding inputs from ancestors repeats var setup down the chain."""
    prob = self.build_model()
    prob.model.cfg.add_var_input('sub.C3.ivar0', 3.0, units='ft')
    prob.model.add_var_input('cfg.sub.C3.ivar1', 4.0, units='inch')
    prob.setup()

    # systems further down the tree pick up one extra setup per ancestor add
    for count, expected_names in ((1, {'', 'cfg.C1', 'cfg.C2', 'cfg.sub.C4'}),
                                  (2, {'cfg'}),
                                  (3, {'cfg.sub', 'cfg.sub.C3'})):
        self.assertEqual(self.get_matching_var_setup_counts(prob, count),
                         sorted(expected_names))
def test_add_output_to_descendant(self):
    """Adding outputs from ancestors repeats var setup down the chain."""
    prob = self.build_model()
    prob.model.cfg.add_var_output('sub.C3.ovar0', 3.0, units='ft')
    prob.model.add_var_output('cfg.sub.C3.ovar1', 4.0, units='inch')
    prob.setup()

    # systems further down the tree pick up one extra setup per ancestor add
    for count, expected_names in ((1, {'', 'cfg.C1', 'cfg.C2', 'cfg.sub.C4'}),
                                  (2, {'cfg'}),
                                  (3, {'cfg.sub', 'cfg.sub.C3'})):
        self.assertEqual(self.get_matching_var_setup_counts(prob, count),
                         sorted(expected_names))
@unittest.skipUnless(MPI and PETScVector, "MPI and PETSc are required.")
class TestInConfigMPIpar(Test3Deep):
    """Run the Test3Deep suite with 'sub' as a parallel group on 2 procs."""
    N_PROCS = 2
    sub_par = True

    def test_io_meta_remote(self):
        # get_io with get_remote=True gathers variables from all ranks;
        # without it, each rank sees only its locally owned subsystem's vars.
        p = self.build_model()
        p.model.add_get_io('cfg', metadata_keys=('value', 'src_indices', 'shape'), get_remote=True)
        p.model.cfg.add_get_io('sub')
        p.setup()
        res = p.model.io_results['cfg']
        # remote gather: all variables under cfg, regardless of owning rank
        expected = {'sub.C3.x', 'sub.C3.y', 'sub.C4.x', 'sub.C4.y', 'C1.x', 'C1.y', 'C2.x', 'C2.y'}
        self.assertEqual(sorted(res), sorted(expected))
        self.check_vs_meta(p, 'cfg', res)
        res = p.model.cfg.io_results['sub']
        # local-only query: rank 0 owns C3, rank 1 owns C4
        if p.model.comm.rank == 0:
            expected = {'C3.y', 'C3.x'}
        else:
            expected = {'C4.y', 'C4.x'}
        self.assertEqual(sorted(res), sorted(expected))
        self.check_vs_meta(p, 'cfg.sub', res)
@unittest.skipUnless(MPI and PETScVector, "MPI and PETSc are required.")
class TestInConfigMPIparpar(Test3Deep):
    """Run the Test3Deep suite with both 'cfg' and 'sub' parallel on 4 procs.

    Inherits every test unchanged; only the process layout differs.
    """
    N_PROCS = 4
    cfg_par = True
    sub_par = True
#
# Feature Tests
#
class TestFeatureAddSubsystem(unittest.TestCase):
    """Feature/doc examples for Group.add_subsystem and promotion variants.

    NOTE(review): these tests look like documentation examples (hence the
    per-method ``import openmdao.api as om``); code left byte-identical.
    """

    def test_group_simple(self):
        # add a component directly to the model and read its declared values
        import openmdao.api as om
        p = om.Problem()
        p.model.add_subsystem('comp1', om.ExecComp('b=2.0*a', a=3.0, b=6.0))
        p.setup()
        self.assertEqual(p.get_val('comp1.a'), 3.0)
        self.assertEqual(p.get_val('comp1.b'), 6.0)

    def test_group_simple_promoted(self):
        # promoting 'a' on both systems connects them automatically
        import openmdao.api as om
        p = om.Problem()
        p.model.add_subsystem('indep', om.IndepVarComp('a', 3.0),
                              promotes_outputs=['a'])
        p.model.add_subsystem('comp1', om.ExecComp('b=2.0*a'),
                              promotes_inputs=['a'])
        p.setup()
        p.run_model()
        self.assertEqual(p.get_val('a'), 3.0)
        self.assertEqual(p.get_val('comp1.b'), 6.0)

    def test_group_nested(self):
        # components nested inside a subgroup are addressed by dotted path
        import openmdao.api as om
        p = om.Problem()
        p.model.add_subsystem('G1', om.Group())
        p.model.G1.add_subsystem('comp1', om.ExecComp('b=2.0*a', a=3.0, b=6.0))
        p.model.G1.add_subsystem('comp2', om.ExecComp('b=3.0*a', a=4.0, b=12.0))
        p.setup()
        self.assertEqual(p.get_val('G1.comp1.a'), 3.0)
        self.assertEqual(p.get_val('G1.comp1.b'), 6.0)
        self.assertEqual(p.get_val('G1.comp2.a'), 4.0)
        self.assertEqual(p.get_val('G1.comp2.b'), 12.0)

    def test_group_nested_promoted1(self):
        import openmdao.api as om
        # promotes from bottom level up 1
        p = om.Problem()
        g1 = p.model.add_subsystem('G1', om.Group())
        g1.add_subsystem('comp1', om.ExecComp('b=2.0*a', a=3.0, b=6.0),
                         promotes_inputs=['a'], promotes_outputs=['b'])
        g1.add_subsystem('comp2', om.ExecComp('b=3.0*a', a=4.0, b=12.0),
                         promotes_inputs=['a'])
        g1.set_input_defaults('a', val=3.5)
        p.setup()
        # output G1.comp1.b is promoted
        self.assertEqual(p.get_val('G1.b'), 6.0)
        # output G1.comp2.b is not promoted
        self.assertEqual(p.get_val('G1.comp2.b'), 12.0)
        # use unpromoted names for the following 2 promoted inputs
        self.assertEqual(p.get_val('G1.comp1.a'), 3.5)
        self.assertEqual(p.get_val('G1.comp2.a'), 3.5)

    def test_group_nested_promoted2(self):
        import openmdao.api as om
        # promotes up from G1 level
        p = om.Problem()
        g1 = om.Group()
        g1.add_subsystem('comp1', om.ExecComp('b=2.0*a', a=3.0, b=6.0))
        g1.add_subsystem('comp2', om.ExecComp('b=3.0*a', a=4.0, b=12.0))
        # use glob pattern 'comp?.a' to promote both comp1.a and comp2.a
        # use glob pattern 'comp?.b' to promote both comp1.b and comp2.b
        p.model.add_subsystem('G1', g1,
                              promotes_inputs=['comp?.a'],
                              promotes_outputs=['comp?.b'])
        p.setup()
        # output G1.comp1.b is promoted
        self.assertEqual(p.get_val('comp1.b'), 6.0)
        # output G1.comp2.b is promoted
        self.assertEqual(p.get_val('comp2.b'), 12.0)
        # access both promoted inputs using unpromoted names.
        self.assertEqual(p.get_val('G1.comp1.a'), 3.0)
        self.assertEqual(p.get_val('G1.comp2.a'), 4.0)

    def test_group_rename_connect(self):
        import openmdao.api as om
        p = om.Problem()
        p.model.add_subsystem('indep', om.IndepVarComp('aa', 3.0),
                              promotes=['aa'])
        p.model.add_subsystem('comp1', om.ExecComp('b=2.0*aa'),
                              promotes_inputs=['aa'])
        # here we alias 'a' to 'aa' so that it will be automatically
        # connected to the independent variable 'aa'.
        p.model.add_subsystem('comp2', om.ExecComp('b=3.0*a'),
                              promotes_inputs=[('a', 'aa')])
        p.setup()
        p.run_model()
        self.assertEqual(p.get_val('comp1.b'), 6.0)
        self.assertEqual(p.get_val('comp2.b'), 9.0)

    def test_promotes_any(self):
        # promote everything on comp1 from the configure method
        import openmdao.api as om

        class SimpleGroup(om.Group):
            def setup(self):
                self.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
                self.add_subsystem('comp2', om.ExecComp('b=2*a'))

            def configure(self):
                self.promotes('comp1', any=['*'])

        top = om.Problem(model=SimpleGroup())
        top.setup()
        self.assertEqual(top.get_val('x'), 5)

    def test_promotes_inputs_and_outputs(self):
        # promote specific inputs and outputs from the configure method
        import openmdao.api as om

        class SimpleGroup(om.Group):
            def setup(self):
                self.add_subsystem('comp1', om.IndepVarComp('x', 5.0))
                self.add_subsystem('comp2', om.ExecComp('b=2*a'))

            def configure(self):
                self.promotes('comp2', inputs=['a'], outputs=['b'])

        top = om.Problem(model=SimpleGroup())
        top.setup()
        self.assertEqual(top.get_val('a'), 1)
        self.assertEqual(top.get_val('b'), 1)
class TestFeatureConnect(unittest.TestCase):
    """Feature/doc examples for connecting variables (units, fan-out, src_indices)."""

    def test_basic_connect_units(self):
        # unit conversion (ft -> inch) happens automatically across the connection
        import numpy as np
        import openmdao.api as om
        p = om.Problem()
        p.model.set_input_defaults('x', np.ones(5), units='ft')
        exec_comp = om.ExecComp('y=sum(x)',
                                x={'value': np.zeros(5), 'units': 'inch'},
                                y={'units': 'inch'})
        p.model.add_subsystem('comp1', exec_comp, promotes_inputs=['x'])
        p.setup()
        p.run_model()
        assert_near_equal(p.get_val('x', units='ft'), np.ones(5))
        assert_near_equal(p.get_val('comp1.x'), np.ones(5)*12.)
        assert_near_equal(p.get_val('comp1.y'), 60.)

    def test_connect_1_to_many(self):
        # one promoted input 'x' fans out to three components
        import numpy as np
        import openmdao.api as om
        p = om.Problem()
        p.model.add_subsystem('C1', om.ExecComp('y=sum(x)*2.0', x=np.zeros(5)), promotes_inputs=['x'])
        p.model.add_subsystem('C2', om.ExecComp('y=sum(x)*4.0', x=np.zeros(5)), promotes_inputs=['x'])
        p.model.add_subsystem('C3', om.ExecComp('y=sum(x)*6.0', x=np.zeros(5)), promotes_inputs=['x'])
        p.setup()
        p.set_val('x', np.ones(5))
        p.run_model()
        assert_near_equal(p.get_val('C1.y'), 10.)
        assert_near_equal(p.get_val('C2.y'), 20.)
        assert_near_equal(p.get_val('C3.y'), 30.)

    def test_connect_src_indices(self):
        import numpy as np
        import openmdao.api as om
        p = om.Problem()
        p.model.add_subsystem('indep', om.IndepVarComp('x', np.ones(5)))
        p.model.add_subsystem('C1', om.ExecComp('y=sum(x)*2.0', x=np.zeros(3)))
        p.model.add_subsystem('C2', om.ExecComp('y=sum(x)*4.0', x=np.zeros(2)))
        # connect C1.x to the first 3 entries of indep.x
        p.model.connect('indep.x', 'C1.x', src_indices=[0, 1, 2])
        # connect C2.x to the last 2 entries of indep.x
        # use -2 (same as 3 in this case) to show that negative indices work.
        p.model.connect('indep.x', 'C2.x', src_indices=[-2, 4])
        p.setup()
        p.run_model()
        assert_near_equal(p['C1.x'], np.ones(3))
        assert_near_equal(p['C1.y'], 6.)
        assert_near_equal(p['C2.x'], np.ones(2))
        assert_near_equal(p['C2.y'], 8.)

    def test_connect_src_indices_noflat(self):
        # src_indices given as (row, col) tuples with flat_src_indices=False
        import numpy as np
        import openmdao.api as om
        p = om.Problem()
        p.model.add_subsystem('indep', om.IndepVarComp('x', np.arange(12).reshape((4, 3))))
        p.model.add_subsystem('C1', om.ExecComp('y=sum(x)*2.0', x=np.zeros((2, 2))))
        # connect C1.x to entries (0,0), (-1,1), (2,1), (1,1) of indep.x
        p.model.connect('indep.x', 'C1.x',
                        src_indices=[[(0, 0), (-1, 1)],
                                     [(2, 1), (1, 1)]], flat_src_indices=False)
        p.setup()
        p.run_model()
        assert_near_equal(p['indep.x'], np.array([[0., 1., 2.],
                                                  [3., 4., 5.],
                                                  [6., 7., 8.],
                                                  [9., 10., 11.]]))
        assert_near_equal(p['C1.x'], np.array([[0., 10.],
                                               [7., 4.]]))
        assert_near_equal(p['C1.y'], 42.)
class TestFeatureSrcIndices(unittest.TestCase):
    """Feature/doc examples for src_indices declared on inputs and in promotes()."""

    def test_promote_src_indices(self):
        # each component's input pulls a different slice of the shared source
        import numpy as np
        import openmdao.api as om

        class MyComp1(om.ExplicitComponent):
            def setup(self):
                # this input will connect to entries 0, 1, and 2 of its source
                self.add_input('x', np.ones(3), src_indices=[0, 1, 2])
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = np.sum(inputs['x'])*2.0

        class MyComp2(om.ExplicitComponent):
            def setup(self):
                # this input will connect to entries 3 and 4 of its source
                self.add_input('x', np.ones(2), src_indices=[3, 4])
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = np.sum(inputs['x'])*4.0

        p = om.Problem()
        # by promoting the following output and inputs to 'x', they will
        # be automatically connected
        p.model.add_subsystem('indep', om.IndepVarComp('x', np.ones(5)),
                              promotes_outputs=['x'])
        p.model.add_subsystem('C1', MyComp1(), promotes_inputs=['x'])
        p.model.add_subsystem('C2', MyComp2(), promotes_inputs=['x'])
        p.setup()
        p.run_model()
        assert_near_equal(p.get_val('C1.x'), np.ones(3))
        assert_near_equal(p.get_val('C1.y'), 6.)
        assert_near_equal(p.get_val('C2.x'), np.ones(2))
        assert_near_equal(p.get_val('C2.y'), 8.)

    def test_promote_src_indices_nonflat(self):
        import numpy as np
        import openmdao.api as om

        class MyComp(om.ExplicitComponent):
            def setup(self):
                # We want to pull the following 4 values out of the source:
                # [(0,0), (3,1), (2,1), (1,1)].
                # Because our input is also non-flat we arrange the
                # source index tuples into an array having the same shape
                # as our input. If we didn't set flat_src_indices to False,
                # we could specify src_indices as a 1D array of indices into
                # the flattened source.
                self.add_input('x', np.ones((2, 2)),
                               src_indices=[[(0, 0), (3, 1)],
                                            [(2, 1), (1, 1)]],
                               flat_src_indices=False)
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = np.sum(inputs['x'])

        p = om.Problem()
        # by promoting the following output and inputs to 'x', they will
        # be automatically connected
        p.model.add_subsystem('indep',
                              om.IndepVarComp('x', np.arange(12).reshape((4, 3))),
                              promotes_outputs=['x'])
        p.model.add_subsystem('C1', MyComp(),
                              promotes_inputs=['x'])
        p.setup()
        p.run_model()
        assert_near_equal(p.get_val('C1.x'),
                          np.array([[0., 10.],
                                    [7., 4.]]))
        assert_near_equal(p.get_val('C1.y'), 21.)

    def test_group_promotes_src_indices(self):
        # src_indices supplied via Group.promotes() in configure, not on the inputs
        import numpy as np
        import openmdao.api as om

        class MyComp1(om.ExplicitComponent):
            """ multiplies input array by 2. """
            def setup(self):
                self.add_input('x', np.ones(3))
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = np.sum(inputs['x'])*2.0

        class MyComp2(om.ExplicitComponent):
            """ multiplies input array by 4. """
            def setup(self):
                self.add_input('x', np.ones(2))
                self.add_output('y', 1.0)

            def compute(self, inputs, outputs):
                outputs['y'] = np.sum(inputs['x'])*4.0

        class MyGroup(om.Group):
            def setup(self):
                self.add_subsystem('comp1', MyComp1())
                self.add_subsystem('comp2', MyComp2())

            def configure(self):
                # splits input via promotes using src_indices
                self.promotes('comp1', inputs=['x'], src_indices=[0, 1, 2])
                self.promotes('comp2', inputs=['x'], src_indices=[3, 4])

        p = om.Problem()
        # p.model.add_subsystem('indep', om.IndepVarComp('x', np.ones(5)),
        #                       promotes_outputs=['x'])
        p.model.set_input_defaults('x', np.ones(5))
        p.model.add_subsystem('G1', MyGroup(), promotes_inputs=['x'])
        p.setup()
        p.set_val('x', np.array(range(5)))
        inp = np.array(range(5))
        p.run_model()
        assert_near_equal(p.get_val('G1.comp1.x'), inp[:3])
        assert_near_equal(p.get_val('G1.comp2.x'), inp[3:])
        assert_near_equal(p.get_val('G1.comp1.y'), np.sum(inp[:3]*2))
        assert_near_equal(p.get_val('G1.comp2.y'), np.sum(inp[3:]*4))
class TestFeatureSetOrder(unittest.TestCase):
    """Feature/doc example for Group.set_order."""

    def test_set_order(self):
        import openmdao.api as om

        class ReportOrderComp(om.ExplicitComponent):
            """Adds name to list."""
            def __init__(self, order_list):
                super().__init__()
                self._order_list = order_list

            def compute(self, inputs, outputs):
                self._order_list.append(self.pathname)

        # this list will record the execution order of our C1, C2, and C3 components
        order_list = []
        prob = om.Problem()
        model = prob.model
        model.add_subsystem('C1', ReportOrderComp(order_list))
        model.add_subsystem('C2', ReportOrderComp(order_list))
        model.add_subsystem('C3', ReportOrderComp(order_list))
        prob.setup()
        prob.run_model()
        # default execution order is the order of add_subsystem calls
        self.assertEqual(order_list, ['C1', 'C2', 'C3'])
        # reset the shared order list
        order_list[:] = []
        prob.setup()
        # now swap C2 and C1 in the order
        model.set_order(['C2', 'C1', 'C3'])
        # after changing the order, we must call setup again
        prob.setup()
        prob.run_model()
        self.assertEqual(order_list, ['C2', 'C1', 'C3'])
class TestFeatureGetSubsystem(unittest.TestCase):
    """Subsystems are reachable as attributes, walking down from the model."""

    def test_group_getsystem_top(self):
        import openmdao.api as om
        from openmdao.core.tests.test_group import BranchGroup

        prob = om.Problem(model=BranchGroup())
        prob.setup()

        # follow each dotted path attribute-by-attribute and confirm pathname
        for path in ('Branch1.G1.G2.comp1', 'Branch2.G3.comp2'):
            system = prob.model
            for name in path.split('.'):
                system = getattr(system, name)
            self.assertEqual(system.pathname, path)
class TestFeatureConfigure(unittest.TestCase):
    """Feature/doc examples for Group.configure (solver overrides, set_input_defaults,
    configure-time I/O additions, and dynamic-shape error reporting)."""

    def test_system_configure(self):
        import openmdao.api as om

        class ImplSimple(om.ImplicitComponent):
            def setup(self):
                self.add_input('a', val=1.)
                self.add_output('x', val=0.)

            def apply_nonlinear(self, inputs, outputs, residuals):
                residuals['x'] = np.exp(outputs['x']) - \
                    inputs['a']**2 * outputs['x']**2

            def linearize(self, inputs, outputs, jacobian):
                jacobian['x', 'x'] = np.exp(outputs['x']) - \
                    2 * inputs['a']**2 * outputs['x']
                jacobian['x', 'a'] = -2 * inputs['a'] * outputs['x']**2

        class Sub(om.Group):
            def setup(self):
                self.add_subsystem('comp', ImplSimple())

            def configure(self):
                # This solver won't solve the system. We want
                # to override it in the parent.
                self.nonlinear_solver = om.NonlinearBlockGS()

        class Super(om.Group):
            def setup(self):
                self.add_subsystem('sub', Sub())

            def configure(self):
                # This will solve it.
                self.sub.nonlinear_solver = om.NewtonSolver(solve_subsystems=False)
                self.sub.linear_solver = om.ScipyKrylov()

        top = om.Problem(model=Super())
        top.setup()
        # parent's configure runs after the child's, so its choices win
        self.assertTrue(isinstance(top.model.sub.nonlinear_solver, om.NewtonSolver))
        self.assertTrue(isinstance(top.model.sub.linear_solver, om.ScipyKrylov))

    def test_configure_set_input_defaults(self):
        # set_input_defaults may be called from configure as well as setup
        class ConfigGroup(om.Group):
            def configure(self):
                self.set_input_defaults('x', val=99.)

        p = om.Problem(model=ConfigGroup())
        C1 = p.model.add_subsystem('C1', om.ExecComp('y=2*x'), promotes_inputs=['x'])
        C2 = p.model.add_subsystem('C2', om.ExecComp('y=3*x'), promotes_inputs=['x'])
        p.setup()
        self.assertEqual(p['x'], 99.)

    def test_configure_add_input_output(self):
        """
        A simple example to compute the resultant force on an aircraft using data
        from an external source. Demonstrates adding I/O in the 'configure' method.
        """
        import numpy as np
        import openmdao.api as om

        class FlightDataComp(om.ExplicitComponent):
            """
            Simulate data generated by an external source/code
            """
            def setup(self):
                # number of points may not be known a priori
                n = 3
                # The vector represents forces at n time points (rows) in 2 dimensional plane (cols)
                self.add_output(name='thrust', shape=(n, 2), units='kN')
                self.add_output(name='drag', shape=(n, 2), units='kN')
                self.add_output(name='lift', shape=(n, 2), units='kN')
                self.add_output(name='weight', shape=(n, 2), units='kN')

            def compute(self, inputs, outputs):
                outputs['thrust'][:, 0] = [500, 600, 700]
                outputs['drag'][:, 0] = [400, 400, 400]
                outputs['weight'][:, 1] = [1000, 1001, 1002]
                outputs['lift'][:, 1] = [1000, 1000, 1000]

        class ForceModel(om.Group):
            def setup(self):
                self.add_subsystem('flightdatacomp', FlightDataComp(),
                                   promotes_outputs=['thrust', 'drag', 'lift', 'weight'])
                self.add_subsystem('totalforcecomp', om.AddSubtractComp())

            def configure(self):
                # Some models that require self-interrogation need to be able to add
                # I/O in components from the configure method of their containing groups.
                # In this case, we can only determine the 'vec_size' for totalforcecomp
                # after flightdatacomp has been setup.
                meta = self.flightdatacomp.get_io_metadata('output', includes='thrust')
                data_shape = meta['thrust']['shape']
                self.totalforcecomp.add_equation('total_force',
                                                 input_names=['thrust', 'drag', 'lift', 'weight'],
                                                 vec_size=data_shape[0], length=data_shape[1],
                                                 scaling_factors=[1, -1, 1, -1], units='kN')
                self.connect('thrust', 'totalforcecomp.thrust')
                self.connect('drag', 'totalforcecomp.drag')
                self.connect('lift', 'totalforcecomp.lift')
                self.connect('weight', 'totalforcecomp.weight')

        p = om.Problem(model=ForceModel())
        p.setup()
        p.run_model()
        assert_near_equal(p.get_val('totalforcecomp.total_force', units='kN'),
                          np.array([[100, 200, 300], [0, -1, -2]]).T)

    def test_configure_add_input_output_list_io_group(self):
        """
        Like the example above but system we're calling list_outputs on is a Group.
        """
        import numpy as np
        import openmdao.api as om

        class FlightDataComp(om.ExplicitComponent):
            """
            Simulate data generated by an external source/code
            """
            def setup(self):
                # number of points may not be known a priori
                n = 3
                # The vector represents forces at n time points (rows) in 2 dimensional plane (cols)
                self.add_output(name='thrust', shape=(n, 2), units='kN')
                self.add_output(name='drag', shape=(n, 2), units='kN')
                self.add_output(name='lift', shape=(n, 2), units='kN')
                self.add_output(name='weight', shape=(n, 2), units='kN')

            def compute(self, inputs, outputs):
                outputs['thrust'][:, 0] = [500, 600, 700]
                outputs['drag'][:, 0] = [400, 400, 400]
                outputs['weight'][:, 1] = [1000, 1001, 1002]
                outputs['lift'][:, 1] = [1000, 1000, 1000]

        class ForceModel(om.Group):
            def setup(self):
                fdgroup = om.Group()
                fdgroup.add_subsystem('flightdatacomp', FlightDataComp(),
                                      promotes_outputs=['thrust', 'drag', 'lift', 'weight'])
                self.add_subsystem('flightdatagroup', fdgroup,
                                   promotes_outputs=['thrust', 'drag', 'lift', 'weight'])
                self.add_subsystem('totalforcecomp', om.AddSubtractComp())

            def configure(self):
                # Some models that require self-interrogation need to be able to add
                # I/O in components from the configure method of their containing groups.
                # In this case, we can only determine the 'vec_size' for totalforcecomp
                # after flightdatagroup has been setup.
                flight_data = dict(self.flightdatagroup.list_outputs(shape=True, prom_name=True,
                                                                     out_stream=None))
                data_shape = flight_data['flightdatacomp.thrust']['shape']
                self.totalforcecomp.add_equation('total_force',
                                                 input_names=['thrust', 'drag', 'lift', 'weight'],
                                                 vec_size=data_shape[0], length=data_shape[1],
                                                 scaling_factors=[1, -1, 1, -1], units='kN')
                self.connect('thrust', 'totalforcecomp.thrust')
                self.connect('drag', 'totalforcecomp.drag')
                self.connect('lift', 'totalforcecomp.lift')
                self.connect('weight', 'totalforcecomp.weight')

        p = om.Problem(model=ForceModel())
        p.setup()
        p.run_model()
        assert_near_equal(p.get_val('totalforcecomp.total_force', units='kN'),
                          np.array([[100, 200, 300], [0, -1, -2]]).T)

    def test_configure_dyn_shape_err(self):
        # querying metadata of a dynamically shaped var during configure must raise
        class MyComp(om.ExplicitComponent):
            def setup(self):
                self.add_input('x', shape_by_conn=True, copy_shape='y')
                self.add_output('y', shape_by_conn=True, copy_shape='x')

            def compute(self, inputs, outputs):
                outputs['y'] = 3*inputs['x']

        class MyGroup(om.Group):
            def setup(self):
                self.add_subsystem('comp', MyComp())

            def configure(self):
                # 'meta' is intentionally unused -- the get_io_metadata call
                # itself is what triggers the expected error
                meta = self.comp.get_io_metadata('output', includes='y')

        p = om.Problem()
        p.model.add_subsystem("G", MyGroup())
        p.model.add_subsystem("sink", om.ExecComp('y=5*x'))
        p.model.connect('G.comp.y', 'sink.x')
        with self.assertRaises(RuntimeError) as cm:
            p.setup()
        msg="'G.comp' <class MyComp>: Can't retrieve shape, size, or value for dynamically sized variable 'y' because they aren't known yet."
        self.assertEqual(str(cm.exception), msg)
class TestFeatureGuessNonlinear(unittest.TestCase):
    """Feature/doc example for Group.guess_nonlinear."""

    def test_guess_nonlinear(self):
        import openmdao.api as om
        import numpy as np

        class Discipline(om.Group):
            def setup(self):
                self.add_subsystem('comp0', om.ExecComp('y=x**2'))
                self.add_subsystem('comp1', om.ExecComp('z=2*external_input'),
                                   promotes_inputs=['external_input'])
                self.add_subsystem('balance', om.BalanceComp('x', lhs_name='y', rhs_name='z'),
                                   promotes_outputs=['x'])
                self.connect('comp0.y', 'balance.y')
                self.connect('comp1.z', 'balance.z')
                self.connect('x', 'comp0.x')
                self.nonlinear_solver = om.NewtonSolver(iprint=2, solve_subsystems=True)
                self.linear_solver = om.DirectSolver()

            def guess_nonlinear(self, inputs, outputs, residuals):
                # Check residuals
                if np.abs(residuals['x']) > 1.0E-2:
                    # inputs are addressed using full path name, regardless of promotion
                    external_input = inputs['comp1.external_input']
                    # balance drives x**2 = 2*external_input
                    x_guess = (2*external_input)**.5
                    # outputs are addressed by the their promoted names
                    outputs['x'] = x_guess  # perfect guess should converge in 0 iterations

        p = om.Problem()
        p.model.add_subsystem('discipline', Discipline(), promotes_inputs=['external_input'])
        p.setup()
        p.set_val('external_input', 1.)
        p.run_model()
        # the analytic guess above is exact, so Newton takes zero iterations
        self.assertEqual(p.model.nonlinear_solver._iter_count, 0)
        assert_near_equal(p.get_val('discipline.x'), 1.41421356, 1e-6)
class TestNaturalNaming(unittest.TestCase):
    """Access to variables buried under promoted groups via 'natural' dotted names.

    NOTE(review): the setup/final_setup sequencing here is deliberate -- values
    are set and read both before and after final_setup; kept byte-identical.
    """

    def test_buried_proms(self):
        p = om.Problem()
        model = p.model
        g1 = model.add_subsystem('g1', om.Group())
        g2 = g1.add_subsystem('g2', om.Group(), promotes=['*'])
        g3 = g2.add_subsystem('g3', om.Group())
        g4 = g3.add_subsystem('g4', om.Group(), promotes=['*'])
        c1 = g4.add_subsystem('c1', om.ExecComp('y=2.0*x', x=7., y=9.), promotes=['x','y'])
        p.setup()
        full_in = 'g1.g2.g3.g4.c1.x'
        full_out = 'g1.g2.g3.g4.c1.y'
        # every 'natural' alias at each promotion level resolves to the one abs name
        prom_ins = ['g1.g2.g3.g4.x', 'g1.g2.g3.x', 'g1.g3.x']
        for prom in prom_ins:
            self.assertEqual(name2abs_names(model, prom), [full_in])
        prom_outs = ['g1.g2.g3.g4.y', 'g1.g2.g3.y', 'g1.g3.y']
        for prom in prom_outs:
            self.assertEqual(name2abs_names(model, prom), [full_out])
        # check setting/getting before final setup
        for name in prom_ins + [full_in]:
            self.assertEqual(p[name], 7.)
        self.assertEqual(g3.get_val('x', get_remote=True), 7.)
        # we allow 'g1.g3.x' here even though it isn't relative to g3,
        # because it maps to an absolute name that is contained in g3.
        self.assertEqual(g3.get_val('g1.g3.x', get_remote=True), 7.)
        for name in prom_outs + [full_out]:
            self.assertEqual(p[name], 9.)
        incount = 0
        for name in prom_ins + [full_in]:
            incount += 1
            p[name] = 77. + incount
            self.assertEqual(p[name], 77. + incount)
        outcount = 0
        for name in prom_outs + [full_out]:
            outcount += 1
            p[name] = 99. + outcount
            self.assertEqual(p[name], 99. + outcount)
        p.final_setup()
        # now check after final setup
        for name in prom_ins + [full_in]:
            self.assertEqual(p[name], 77. + incount)
        self.assertEqual(g3.get_val('x', get_remote=True), 77. + incount)
        for name in prom_outs + [full_out]:
            self.assertEqual(p[name], 99. + outcount)
        incount = 0
        for name in prom_ins + [full_in]:
            incount += 1
            p[name] = 7. + incount
            self.assertEqual(p[name], 7. + incount)
        outcount = 0
        for name in prom_outs + [full_out]:
            outcount += 1
            p[name] = 9. + outcount
            self.assertEqual(p[name], 9. + outcount)
@unittest.skipUnless(MPI and PETScVector, "MPI and PETSc are required.")
class TestNaturalNamingMPI(unittest.TestCase):
    """MPI version of TestNaturalNaming: buried promoted names under a ParallelGroup.

    NOTE(review): the barrier/get_remote sequencing is order-dependent;
    code kept byte-identical.
    """
    N_PROCS = 2

    def test_buried_proms(self):
        p = om.Problem()
        model = p.model
        par = model.add_subsystem('par', om.ParallelGroup())
        g1 = par.add_subsystem('g1', om.Group())
        g2 = g1.add_subsystem('g2', om.Group(), promotes=['*'])
        g3 = g2.add_subsystem('g3', om.Group())
        g4 = g3.add_subsystem('g4', om.Group(), promotes=['*'])
        c1 = g4.add_subsystem('c1', om.ExecComp('y=2.0*x', x=7., y=9.), promotes=['x','y'])
        g1a = par.add_subsystem('g1a', om.Group())
        g2a = g1a.add_subsystem('g2', om.Group(), promotes=['*'])
        g3a = g2a.add_subsystem('g3', om.Group())
        g4a = g3a.add_subsystem('g4', om.Group(), promotes=['*'])
        c1 = g4a.add_subsystem('c1', om.ExecComp('y=2.0*x', x=7., y=9.), promotes=['x','y'])
        p.setup()
        for gtop in ['par.g1', 'par.g1a']:
            full_in = f'{gtop}.g2.g3.g4.c1.x'
            full_out = f'{gtop}.g2.g3.g4.c1.y'
            prom_ins = [f'{gtop}.g2.g3.g4.x', f'{gtop}.g2.g3.x', f'{gtop}.g3.x']
            for prom in prom_ins:
                self.assertEqual(name2abs_names(model, prom), [full_in])
            prom_outs = [f'{gtop}.g2.g3.g4.y', f'{gtop}.g2.g3.y', f'{gtop}.g3.y']
            for prom in prom_outs:
                self.assertEqual(name2abs_names(model, prom), [full_out])
            # check setting/getting before final setup
            for name in prom_ins + [full_in]:
                self.assertEqual(p.get_val(name, get_remote=True), 7.)
            for name in prom_outs + [full_out]:
                self.assertEqual(p.get_val(name, get_remote=True), 9.)
            incount = 0
            for name in prom_ins + [full_in]:
                incount += 1
                p[name] = 77. + incount
                # barrier so the owning rank has written before the remote get
                p.model.comm.barrier()
                self.assertEqual(p.get_val(name, get_remote=True), 77. + incount)
            outcount = 0
            for name in prom_outs + [full_out]:
                outcount += 1
                p[name] = 99. + outcount
                p.model.comm.barrier()
                self.assertEqual(p.get_val(name, get_remote=True), 99. + outcount)
        p.final_setup()
        # now check after final setup
        for gtop in ['par.g1', 'par.g1a']:
            full_in = f'{gtop}.g2.g3.g4.c1.x'
            full_out = f'{gtop}.g2.g3.g4.c1.y'
            for name in prom_ins + [full_in]:
                self.assertEqual(p.get_val(name, get_remote=True), 77. + incount)
            for name in prom_outs + [full_out]:
                self.assertEqual(p.get_val(name, get_remote=True), 99. + outcount)
        for gtop in ['par.g1', 'par.g1a']:
            full_in = f'{gtop}.g2.g3.g4.c1.x'
            full_out = f'{gtop}.g2.g3.g4.c1.y'
            incount = 0
            for name in prom_ins + [full_in]:
                incount += 1
                p[name] = 7. + incount
                p.model.comm.barrier()
                self.assertEqual(p.get_val(name, get_remote=True), 7. + incount)
            outcount = 0
            for name in prom_outs + [full_out]:
                outcount += 1
                p[name] = 9. + outcount
                p.model.comm.barrier()
                self.assertEqual(p.get_val(name, get_remote=True), 9. + outcount)
        # vars split across the ParallelGroup must be flagged for gathering
        self.assertEqual(set(p.model._vars_to_gather),
                         {'par.g1.g2.g3.g4.c1.x', 'par.g1a.g2.g3.g4.c1.x', 'par.g1.g2.g3.g4.c1.y', 'par.g1a.g2.g3.g4.c1.y'})
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| 37.671318
| 301
| 0.553994
| 19,816
| 151,175
| 4.081197
| 0.042895
| 0.058314
| 0.042462
| 0.032718
| 0.801083
| 0.767574
| 0.739629
| 0.708914
| 0.678966
| 0.654607
| 0
| 0.031404
| 0.291636
| 151,175
| 4,012
| 302
| 37.680708
| 0.723804
| 0.041303
| 0
| 0.63288
| 0
| 0.011461
| 0.130484
| 0.00346
| 0
| 0
| 0
| 0
| 0.143625
| 1
| 0.118911
| false
| 0.000358
| 0.017908
| 0
| 0.178367
| 0.005731
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
98fa25302361a60278a0e6a24c143acc35cf1124
| 119,945
|
py
|
Python
|
System_monitoringu/skrypt_sterujacy.py
|
Beanarny/Praca_inz
|
38f843af8deeb1f1be6c77b553cfdcc4ad2a7c00
|
[
"MIT"
] | null | null | null |
System_monitoringu/skrypt_sterujacy.py
|
Beanarny/Praca_inz
|
38f843af8deeb1f1be6c77b553cfdcc4ad2a7c00
|
[
"MIT"
] | null | null | null |
System_monitoringu/skrypt_sterujacy.py
|
Beanarny/Praca_inz
|
38f843af8deeb1f1be6c77b553cfdcc4ad2a7c00
|
[
"MIT"
] | null | null | null |
import sys
from PyQt5 import QtWidgets
from PyQt5.uic import loadUi
from time import sleep
import csv
import mysql.connector
import serial
import datetime
import matplotlib.pyplot as plt
import hashlib
import numpy as np
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from pyfirmata import Arduino
# NOTE: password was changed for the new database -- credentials: user / userpass
result = None

# Retry until the MySQL connection succeeds.  The original version used a bare
# ``except:`` which swallowed *everything* (including KeyboardInterrupt),
# making the loop unkillable, and it busy-spun with no delay.  Catch only
# connector errors and back off briefly between attempts instead.
while result is None:
    try:
        cnx = mysql.connector.connect(user='user', password='userpass',
                                      host='localhost', database='main_db')
        result = cnx
        # print("...Connection established...")
    except mysql.connector.Error:
        # print("Connection failed")
        sleep(1)  # avoid a tight retry loop while the server is unreachable

# buffered cursor so fetchall() can be called after execute() without draining
cursor = cnx.cursor(buffered=True)
########################################################################
def encrypt_string(hash_string):
    """Return the SHA-256 hex digest of ``hash_string``.

    Note: this is a one-way hash, not encryption, despite the name.
    """
    return hashlib.sha256(hash_string.encode()).hexdigest()
########################################################################
# Create an admin/admin employee for login if one does not exist yet,
# so the very first login is possible.  The admin password should then be
# changed via "Edit employee data" -> "change password".
cursor.execute("SELECT login FROM personel WHERE login LIKE \'admin\'")
myresult = cursor.fetchall()
try:
    # EAFP: indexing the result raises IndexError when no admin row exists
    x = myresult[0]
    # admin exists
    print("Logowanie...")
except:
    try:
        print("Pierwsze logowanie...")
        haslo_admina = encrypt_string("admin")
        # NOTE(review): values are built with str.format -- fine for these
        # hard-coded constants, but parameterized queries should be used if
        # any value ever comes from user input.
        cursor.execute("INSERT INTO personel (imie, nazwisko, plec, data_urodzenia, PESEL, data_zatrudnienia, login, zaszyfrowane_haslo, telefon, email, kod_pocztowy, miejscowosc, ulica)\
 VALUES (\'admin\',\'admin\',\'Mezczyzna\',\'2020-08-08\',\'55667712345\',\'2020-08-08\',\'admin\',\'{encr_admin_pass}\',\'a\',\'a\',\'a',\'a\',\'a\')".format(encr_admin_pass=haslo_admina))
        cnx.commit()
        # test admin added successfully
    except Exception as e: print (e)
########################################################################
# Serial link to the Arduino board; short timeout keeps reads non-blocking.
port = "COM3"
ser = serial.Serial(port, 9600)  # open serial port that Arduino is using
ser.timeout=0.1
class Worker(QRunnable):
    """QRunnable that captures call arguments for background execution.

    NOTE(review): ``run`` currently contains only a docstring -- this worker
    performs no work yet; presumably a processing body is added elsewhere or
    was planned. Confirm before relying on it.
    """
    def __init__(self, *args, **kwargs):
        super(Worker, self).__init__()
        # Store constructor arguments (re-used for processing)
        self.args = args
        self.kwargs = kwargs

    @pyqtSlot()
    def run(self):
        '''
        Initialise the runner function with passed args, kwargs.
        '''
#############################################################
class main_window(QMainWindow):  # MAIN WINDOW
    """Main application window.

    Wires up the GUI (gui_v4.ui), starts the periodic serial-port measurement
    import, draws live/history plots and maintains the event log stored in the
    `rejestr_zdarzen` table.  Relies on the module-level `cnx`/`cursor`
    (MySQL), `ser` (serial port) and the pop-up windows created at module
    scope (`notification_win`, `new_patient_window`, ...).
    """

    def __init__(self, *args, **kwargs):
        super(main_window, self).__init__(*args, **kwargs)
        loadUi('gui_v4.ui', self)
        self.setWindowTitle("System monitorowania ruchu pacjentow")
        # Button wiring.
        self.pushButtonObserve.clicked.connect(self.pushButtonObserveClicked)
        self.pushButtonBegin.clicked.connect(self.pushButtonBeginClicked)
        self.newPatientButton.clicked.connect(self.newPatientButtonClicked)
        self.newUserButton.clicked.connect(self.newUserButtonClicked)
        # Live-plot time-range slider: 10 s .. 3*60 s, starting at 60 s.
        self.rangeSlider.setMinimum(10)
        self.rangeSlider.setMaximum(180)
        self.rangeSlider.setValue(60)
        self.rangeSlider.setTickInterval(18)
        self.rangeSlider.setTickPosition(QSlider.TicksBelow)  # ticks drawn below the slider
        self.rangeSlider.valueChanged.connect(self.v_change)  # slider moves mirror into the line edit
        self.sliderValueLineEdit.setText("60")  # must match rangeSlider.setValue() above
        self.showHistoryButton.clicked.connect(self.showHistoryButtonClicked)
        self.showEventsButton.clicked.connect(self.showEventsButtonClicked)
        self.pushButtonFilterHistoryPatient.clicked.connect(self.pushButtonFilterHistoryPatientClicked)
        self.pushButtonFilterLivePatient.clicked.connect(self.pushButtonFilterLivePatientClicked)
        self.editPatientButton.clicked.connect(self.editPatientButtonClicked)
        self.editUserButton.clicked.connect(self.editUserButtonClicked)
        self.newSensorButton.clicked.connect(self.newSensorButtonClicked)
        self.editSensorButton.clicked.connect(self.editSensorButtonClicked)
        self.assignSensorPushButton.clicked.connect(self.assignSensorPushButtonClicked)
        self.sendMsgPushButton.clicked.connect(self.sendMsgPushButtonClicked)
        self.pushButtonCleanEvents.clicked.connect(self.pushButtonCleanEventsClicked)
        self.eventLineEdit.setPlaceholderText("np. upadek")
        self.filterLiveLineEdit.setPlaceholderText("imię, nazwisko lub ID")
        self.filterHistoryLineEdit.setPlaceholderText("imię, nazwisko lub ID")
        self.threadpool = QThreadPool()
        self.current_user = None  # presumably set to the login at sign-in -- TODO confirm against the login window

    # ------------------------------------------------------------------ helpers
    def _log_event(self, rodzaj_zdarzenia):
        """Insert an event for the logged-in user into `rejestr_zdarzen` and prepend it to the on-screen list."""
        # NOTE(review): login is pasted into the SQL text -- SQL injection risk; should be a %s parameter.
        cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE \"{jaki_login}\"".format(jaki_login=self.current_user))
        ID_pracownika = cursor.fetchall()[0][0]
        query = ("INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)")
        cursor.execute(query, (ID_pracownika, rodzaj_zdarzenia, ""))
        cnx.commit()
        self.eventList.insertItem(0, rodzaj_zdarzenia + ", " + str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')))

    def _patient_for_sensor(self, sensor_id):
        """Return (first_name, surname) of the patient assigned to *sensor_id*."""
        cursor.execute("SELECT pac.imie, pac.nazwisko "
                       "FROM pacjenci pac "
                       "JOIN przydzial_czujnikow prz ON pac.ID_pacjenta=prz.ID_pacjenta "
                       "JOIN czujniki czu ON prz.ID_czujnika=czu.ID_czujnika "
                       "WHERE czu.ID_czujnika={jakie_id};".format(jakie_id=sensor_id))
        row = cursor.fetchall()[0]
        return row[0], row[1]

    def _fill_patient_combo(self, combo, seek):
        """Fill *combo* with "imie nazwisko" of patients matching *seek* by name, surname or ID (case sensitive)."""
        try:
            # Drop the BINARY prefix if a case-insensitive match is wanted.
            cursor.execute("SELECT imie, nazwisko FROM pacjenci WHERE imie LIKE BINARY \'%{seek}%\' OR nazwisko LIKE BINARY \'%{seek}%\' OR ID_pacjenta LIKE BINARY \'%{seek}%\'".format(seek=seek))
            combo.addItems([str(row[0]) + " " + str(row[1]) for row in cursor.fetchall()])
        except Exception:
            pass  # best-effort: leave the combo empty when the query fails

    def _datetime_range(self, date_from_edit, time_from_edit, date_to_edit, time_to_edit):
        """Read a ("YYYY-MM-DD HH:MM:SS", "YYYY-MM-DD HH:MM:SS") range from date/time widgets."""
        dt_from = date_from_edit.date().toString("yyyy-MM-dd") + " " + time_from_edit.time().toString()
        dt_to = date_to_edit.date().toString("yyyy-MM-dd") + " " + time_to_edit.time().toString()
        return dt_from, dt_to

    def _plot_motion(self, array_y, label):
        """Plot one motion trace; x axis in seconds assuming ~33 samples per second."""
        fig = plt.figure(figsize=(18, 16), dpi=80, facecolor='w', edgecolor='k')
        ax = plt.subplot(111)
        line1, = ax.plot(np.arange(0, len(array_y) * 0.03, 0.03), array_y, label=label)
        # Shrink the axis height by 10% at the bottom and place the legend there.
        box = ax.get_position()
        ax.set_position([box.x0, box.y0 + box.height * 0.1, box.width, box.height * 0.9])
        ax.legend(loc='upper right', bbox_to_anchor=(0.4, 1.0), ncol=3, fancybox=True, shadow=True)
        plt.xlabel("czas [s]")
        plt.ylabel("amplituda [g]")
        plt.grid()
        plt.show()

    # ------------------------------------------------------------------ event list actions
    def pushButtonCleanEventsClicked(self):
        """Clear the on-screen event list after confirmation (DB copy is untouched)."""
        worker = Worker()
        self.threadpool.start(worker)
        qm = QMessageBox
        ret = qm.question(self, '', "Czy na pewno chcesz wyczyscić listę zdarzeń?\n\n*zdarzenia można później wczytać z bazy danych", qm.Yes | qm.No)
        if ret == qm.Yes:
            self.eventList.clear()

    def sendMsgPushButtonClicked(self):
        """Open the PC -> Arduino message window."""
        python_to_arduino_msg_win.show()
        worker = Worker()
        self.threadpool.start(worker)

    # ------------------------------------------------------------------ measurement import
    def pushButtonBeginClicked(self):
        """Start the periodic (10 ms) import of measurements from the serial monitor.

        Each line read from the Arduino is stored in `pomiary` and checked for
        two alarms: apnoea (x-axis amplitude below 0.03 g over ~5 s) and fall
        (modulus above the patient's alarm threshold).
        """
        notification_win.label.setText("\nRozpoczęto monitoring.\n")
        notification_win.show()
        self.counter = 0  # inserts since the last batch commit
        # --- fall detection: map sensor ID -> that patient's alarm threshold
        self.dict_id_to_alarmvalue = {}
        self._log_event("rozpoczecie pomiaru")
        cursor.execute("SELECT prz.ID_czujnika, pac.wartosc_alarmowa "
                       "FROM przydzial_czujnikow prz "
                       "JOIN pacjenci pac "
                       "ON prz.ID_pacjenta = pac.ID_pacjenta;")
        for row in cursor.fetchall():
            self.dict_id_to_alarmvalue[str(row[0])] = str(row[1])
        # --- apnoea detection: one row per sensor ID holding the last 150
        # x-axis samples (~5 s); initialised with a spread of values so a
        # fresh buffer does not immediately look like apnoea.
        arr_5s = np.linspace(100.01, 101.50, 150)
        self.df_sekw_bezdechu = 100 * [arr_5s]

        def execute_single_import():
            # Read and process a single serial line; any malformed line is skipped.
            try:
                temp = ser.readline().decode('utf-8')
                temp = str(temp).split()
                # temp = [sensor_id, modulus, x, y, z]
                query = ("INSERT INTO pomiary (ID_czujnika, modul, x_axis, y_axis, z_axis) VALUES (%s, %s, %s, %s, %s)")
                cursor.execute(query, (temp[0], temp[1], temp[2], temp[3], temp[4]))
                id_czujnika = temp[0]
                mod = temp[1]
                x_value = temp[2]
                # Push the new x sample into this sensor's sliding window.
                # (Removed two corrupted lines that evaluated a bare `np.set_`
                # attribute -- the remains of commented-out set_printoptions calls.)
                self.df_sekw_bezdechu[int(id_czujnika)] = np.roll(self.df_sekw_bezdechu[int(id_czujnika)], 1)
                self.df_sekw_bezdechu[int(id_czujnika)][0] = float(x_value)
                max_value = np.max(self.df_sekw_bezdechu[int(id_czujnika)])
                min_value = np.min(self.df_sekw_bezdechu[int(id_czujnika)])
                # Apnoea: amplitude stayed within 0.03 g for the whole window.
                if (max_value - min_value) < 0.03:
                    imie, nazwisko = self._patient_for_sensor(temp[0])
                    notification_win.label.setText("\nPacjent {jakie_imie} {jakie_nazwisko} nie wykazuje aktywnosci. Podejrzenie bezdechu.\n".format(jakie_imie=imie, jakie_nazwisko=nazwisko))
                    notification_win.show()
                    # Refill with non-apnoea values to avoid notification spam.
                    self.df_sekw_bezdechu[int(id_czujnika)] = arr_5s
                    self._log_event("Bezdech - {jakie_imie} {jakie_nazwisko}".format(jakie_imie=imie, jakie_nazwisko=nazwisko))
                # Fall: modulus above the threshold stored for this sensor ID.
                try:
                    if (float(mod) > float(self.dict_id_to_alarmvalue[str(id_czujnika)])):
                        imie, nazwisko = self._patient_for_sensor(temp[0])
                        notification_win.label.setText("\nPacjent {jakie_imie} {jakie_nazwisko} upadl.\n".format(jakie_imie=imie, jakie_nazwisko=nazwisko))
                        notification_win.show()
                        self._log_event("Upadek - {jakie_imie} {jakie_nazwisko}".format(jakie_imie=imie, jakie_nazwisko=nazwisko))
                except Exception:
                    pass  # sensor without an assigned patient/threshold
                self.counter = self.counter + 1
                if ((self.counter % 100) == 0):
                    cnx.commit()  # commit in batches of 100 inserts
                    self.counter = 0
            except Exception:
                pass  # best-effort import loop

        self.timer = QTimer()
        self.timer.setInterval(10)  # ms between serial reads
        self.timer.timeout.connect(lambda: execute_single_import())
        self.timer.start()

    # ------------------------------------------------------------------ window openers
    def assignSensorPushButtonClicked(self):
        assign_sensor_window.show()
        self.threadpool.start(Worker())

    def editSensorButtonClicked(self):
        edit_sensor_window.show()
        self.threadpool.start(Worker())

    def newSensorButtonClicked(self):
        new_sensor_window.show()
        self.threadpool.start(Worker())

    def editPatientButtonClicked(self):
        edit_patient_window.show()
        self.threadpool.start(Worker())

    def editUserButtonClicked(self):
        edit_user_window.show()
        self.threadpool.start(Worker())

    # ------------------------------------------------------------------ patient filters
    def pushButtonFilterHistoryPatientClicked(self):
        """Fill the history combo box with patients matching the filter text."""
        self.patientHistoryComboBox.clear()
        self.threadpool.start(Worker())
        self._fill_patient_combo(self.patientHistoryComboBox, self.filterHistoryLineEdit.text())

    def pushButtonFilterLivePatientClicked(self):
        """Fill the live combo box with patients matching the filter text."""
        self.patientLiveComboBox.clear()
        self.threadpool.start(Worker())
        self._fill_patient_combo(self.patientLiveComboBox, self.filterLiveLineEdit.text())

    # ------------------------------------------------------------------ event / history views
    def showEventsButtonClicked(self):
        """Load events from `rejestr_zdarzen` for the selected range and event-type filter."""
        self.threadpool.start(Worker())
        self.eventList.clear()
        dateTimeFrom, dateTimeTo = self._datetime_range(self.eventDateFrom, self.eventTimeFrom, self.eventDateTo, self.eventTimeTo)
        seekEvent = self.eventLineEdit.text()
        try:
            # NOTE(review): range/filter values are interpolated into the SQL -- injection risk; parameterize.
            cursor.execute("SELECT per.imie, rej.rodzaj_zdarzenia, rej.data_i_czas_zdarzenia "
                           "FROM rejestr_zdarzen rej "
                           "JOIN personel per "
                           "ON rej.ID_pracownika=per.ID_pracownika "
                           "WHERE rej.data_i_czas_zdarzenia BETWEEN \"{data_i_czas_od}\" AND \"{data_i_czas_do}\" "
                           "AND rej.rodzaj_zdarzenia LIKE \'%{jakie_zdarzenia}%\'".format(data_i_czas_od=dateTimeFrom, data_i_czas_do=dateTimeTo, jakie_zdarzenia=seekEvent))
            for row in cursor.fetchall():
                self.eventList.insertItem(0, str(row[0]) + ", " + str(row[1]) + ", " + str(row[2].strftime('%Y-%m-%d %H:%M:%S')))
            notification_win.label.setText("Zakonczono importowanie zdarzeń.")
            notification_win.show()
        except Exception:
            notification_win.label.setText("Niepowodzenie dodania zdarzen.")
            notification_win.show()

    def showHistoryButtonClicked(self):
        """Plot the selected patient's x-axis history within the chosen date/time range."""
        self.threadpool.start(Worker())
        dateTimeFrom, dateTimeTo = self._datetime_range(self.historyFromDateEdit, self.historyFromTimeEdit, self.historyToDateEdit, self.historyToTimeEdit)
        wybrany_pacjent = self.patientHistoryComboBox.currentText()
        try:
            wybrany_pacjent = wybrany_pacjent.split()
            wybrane_imie = wybrany_pacjent[0]
            wybrane_nazwisko = wybrany_pacjent[1]
        except Exception:
            pass  # no selection; the query below then fails and shows the notification
        try:
            # NOTE(review): names/dates are interpolated into the SQL -- injection risk; parameterize.
            cursor.execute("SELECT ID_pomiaru, x_axis "
                           "FROM pomiary pom "
                           "INNER JOIN czujniki cz "
                           "ON pom.ID_czujnika=cz.ID_czujnika "
                           "INNER JOIN przydzial_czujnikow prz "
                           "ON prz.ID_czujnika=cz.ID_czujnika "
                           "INNER JOIN pacjenci pac "
                           "ON prz.ID_pacjenta=pac.ID_pacjenta "
                           "WHERE pac.imie LIKE \'{imie}\' AND pac.nazwisko LIKE \'{nazwisko}\' "
                           "AND data_i_czas_pomiaru BETWEEN \"{data_i_czas_od}\" AND \"{data_i_czas_do}\"".format(imie=wybrane_imie, nazwisko=wybrane_nazwisko, data_i_czas_od=dateTimeFrom, data_i_czas_do=dateTimeTo))
            array_y = [float(row[1]) for row in cursor.fetchall()]
            self._plot_motion(array_y, 'Historia ruchu')
        except Exception:
            notification_win.label.setText("Niepowodzenie wyswietlania wykresu. Nie wybrano pacjenta lub nie udało się połączyć z bazą danych. \n\nUpewnij się, czy kliknięto przycisk Filtruj.")
            notification_win.show()

    def v_change(self):
        """Mirror the slider value into sliderValueLineEdit."""
        self.sliderValueLineEdit.setText(str(self.rangeSlider.value()))
        self.threadpool.start(Worker())

    def pushButtonObserveClicked(self):
        """Plot the selected patient's most recent samples; range (seconds) comes from the slider."""
        self.threadpool.start(Worker())
        # Clamp the requested range to 10..3000 seconds.
        if int(self.sliderValueLineEdit.text()) < 10 or int(self.sliderValueLineEdit.text()) > 3000:
            self.sliderValueLineEdit.setText("60")
        jaki_zakres = self.sliderValueLineEdit.text()
        wybrany_pacjent = self.patientLiveComboBox.currentText()
        try:
            wybrany_pacjent = wybrany_pacjent.split()
            wybrane_imie = wybrany_pacjent[0]
            wybrane_nazwisko = wybrany_pacjent[1]
        except Exception:
            pass  # no selection; handled by the except below
        try:
            # 33*{sekundy}: ~33 measurements per second -- TODO confirm sampling rate.
            cursor.execute("SELECT ID_pomiaru,x_axis "
                           "FROM pomiary pom "
                           "INNER JOIN czujniki cz "
                           "ON pom.ID_czujnika=cz.ID_czujnika "
                           "INNER JOIN przydzial_czujnikow prz "
                           "ON prz.ID_czujnika=cz.ID_czujnika "
                           "INNER JOIN pacjenci pac "
                           "ON prz.ID_pacjenta=pac.ID_pacjenta "
                           "WHERE pac.imie LIKE \'{imie}\' "
                           "AND pac.nazwisko LIKE \'{nazwisko}\' "
                           "AND ID_pomiaru > ((SELECT MAX(ID_pomiaru) FROM pomiary)-(33*{sekundy}));".format(imie=wybrane_imie, nazwisko=wybrane_nazwisko, sekundy=jaki_zakres))
            array_y = [float(row[1]) for row in cursor.fetchall()]
            self._plot_motion(array_y, '{imie_i_nazwisko}'.format(imie_i_nazwisko=wybrane_imie + " " + wybrane_nazwisko))
            self.currentPersonLabel.setText(self.patientLiveComboBox.currentText())
        except Exception:
            self.currentPersonLabel.setText("---")
            notification_win.label.setText("Niepowodzenie wyswietlania wykresu. Nie wybrano pacjenta lub nie udało się połączyć z bazą danych. \n\nUpewnij się, czy kliknięto przycisk Filtruj.")
            notification_win.show()

    # ------------------------------------------------------------------ dialogs
    def newPatientButtonClicked(self):
        new_patient_window.show()
        self.threadpool.start(Worker())

    def newUserButtonClicked(self):
        new_user_window.show()
        self.threadpool.start(Worker())
class new_patient(QMainWindow):
    """Dialog for adding a new patient record to the `pacjenci` table."""

    def __init__(self):
        QMainWindow.__init__(self)
        loadUi('add_patient_gui.ui', self)
        self.setWindowTitle("Dodawanie nowego pacjenta")
        self.pushButtonAdd.clicked.connect(self.pushButtonAddClicked)
        self.pushButtonAbort.clicked.connect(self.pushButtonAbortClicked)
        self.birthDateLineEdit.setPlaceholderText("RRRR-MM-DD")
        self.emailLineEdit.setPlaceholderText("email@address.com")
        self.sexComboBox.addItem("Mężczyzna")
        self.sexComboBox.addItem("Kobieta")
        self.threadpool = QThreadPool()

    def pushButtonAbortClicked(self):
        """Close the dialog without saving anything."""
        new_patient_window.hide()
        self.threadpool.start(Worker())

    def pushButtonAddClicked(self):
        """Read the form, INSERT the patient, log the action; roll back and notify on failure."""
        dane = (
            self.nameLineEdit.text(),
            self.surnameLineEdit.text(),
            self.sexComboBox.currentText(),
            self.birthDateLineEdit.text(),
            self.peselLineEdit.text(),
            self.phoneLineEdit.text(),
            self.emailLineEdit.text(),
            self.cityCodeLineEdit.text(),
            self.cityLineEdit.text(),
            self.streetLineEdit.text(),
            self.alarmValueLineEdit.text(),
        )
        imie, nazwisko = dane[0], dane[1]
        self.threadpool.start(Worker())
        query = ("INSERT INTO pacjenci (imie, nazwisko, plec, data_urodzenia, PESEL, telefon, email, kod_pocztowy, miejscowosc, ulica, wartosc_alarmowa) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
        try:
            cursor.execute(query, dane)
            cnx.commit()
            # Clear the form for the next entry; the sex combo is rebuilt,
            # not cleared, so it keeps its two options.
            for pole in (self.nameLineEdit, self.surnameLineEdit, self.birthDateLineEdit,
                         self.peselLineEdit, self.phoneLineEdit, self.emailLineEdit,
                         self.cityCodeLineEdit, self.cityLineEdit, self.streetLineEdit,
                         self.alarmValueLineEdit):
                pole.setText("")
            self.sexComboBox.clear()
            self.sexComboBox.addItem("Mężczyzna")
            self.sexComboBox.addItem("Kobieta")
            notification_win.label.setText("Dodano nowego pacjenta.")
            notification_win.show()
            # Register the action in the event log.
            cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE \"{jaki_login}\"".format(jaki_login=window.current_user))
            ID_pracownika = cursor.fetchall()[0][0]
            cursor.execute("INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
                           (ID_pracownika, "Dodanie pacjenta {jakie_imie} {jakie_nazwisko}".format(jakie_imie=imie, jakie_nazwisko=nazwisko), ""))
            cnx.commit()
            window.eventList.insertItem(0, "Dodanie pacjenta {jakie_imie} {jakie_nazwisko}, ".format(jakie_imie=imie, jakie_nazwisko=nazwisko) + str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
            new_patient_window.hide()
        except:
            notification_win.label.setText("Niepoprawne dane. Zwróć uwagę, czy data urodzenia oraz email mają poprawny format.")
            notification_win.show()
            cnx.rollback()
class edit_patient(QMainWindow): #
def __init__(self):
QMainWindow.__init__(self)
loadUi('edit_patient_gui.ui', self)
self.setWindowTitle("Edycja danych pacjenta")
self.pushButtonSaveChanges.clicked.connect(self.pushButtonSaveChangesClicked)
self.pushButtonAbort.clicked.connect(self.pushButtonAbortClicked)
self.birthDateLineEdit.setPlaceholderText("RRRR-MM-DD")
self.emailLineEdit.setPlaceholderText("email@address.com")
self.pushButtonFilterEditPatient.clicked.connect(self.pushButtonFilterEditPatientClicked)
self.pushButtonLoadToEditPatient.clicked.connect(self.pushButtonLoadToEditPatientClicked)
self.pushButtonDeletePatient.clicked.connect(self.pushButtonDeletePatientClicked)
self.threadpool = QThreadPool()
def pushButtonFilterEditPatientClicked(self):
# Filtrowanie pacjentow
self.patientToEditComboBox.clear()
worker = Worker()
self.threadpool.start(worker)
# print("Wybor pacjentow... ")
seekToEdit = self.filterToEditLineEdit.text()
# print(seekToEdit)
try:
cursor.execute("SELECT imie, nazwisko FROM pacjenci WHERE imie LIKE BINARY \'%{seek}%\' OR nazwisko LIKE BINARY \'%{seek}%\' OR ID_pacjenta LIKE BINARY \'%{seek}%\'".format(seek=seekToEdit))
# usunac przedrostek BINARY, jezeli sie chce case_insensitive
# cursor.execute("SELECT imie, nazwisko FROM pacjenci")
# print("...SELECT query succeeded...")
# OK.... ale teraz jak w matplotlibie okreslic DATĘ jako os X, i x_axis jako os Y (x_axis to wartosci, os pionowa)
myresult = cursor.fetchall()
# # print("The length of \'myresult\' is: ", len(myresult)) # pokazuje ile rekordow ma zostac wykorzystanych na wykresie
pacjenci = []
for x in myresult:
pacjenci.append(str(x[0])+" "+str(x[1]))
self.patientToEditComboBox.addItems(pacjenci)
###################################################################
except:
pass
# print("SELECT query failed")
def pushButtonLoadToEditPatientClicked(self):
# print("Ladowanie danych pacjenta... ")
worker = Worker()
self.threadpool.start(worker)
# seekHist = self.filterToEditLineEdit.text()
# # print(seekHist)
wybrany_pacjent = self.patientToEditComboBox.currentText()
try:
wybrany_pacjent = wybrany_pacjent.split()
wybrane_imie = wybrany_pacjent[0]
wybrane_nazwisko = wybrany_pacjent[1]
except:
pass
try:
cursor.execute("SELECT imie, nazwisko, plec, data_urodzenia, PESEL, telefon, email, kod_pocztowy, miejscowosc, ulica, wartosc_alarmowa FROM pacjenci WHERE imie LIKE \'%{imie}%\' AND nazwisko LIKE \'%{nazwisko}%\'".format(imie=wybrane_imie, nazwisko=wybrane_nazwisko))
# usunac przedrostek BINARY, jezeli sie chce case_insensitive
# cursor.execute("SELECT imie, nazwisko FROM pacjenci")
# print("...SELECT query succeeded...")
# OK.... ale teraz jak w matplotlibie okreslic DATĘ jako os X, i x_axis jako os Y (x_axis to wartosci, os pionowa)
myresult = cursor.fetchall()
# # print("The length of \'myresult\' is: ", len(myresult)) # pokazuje ile rekordow ma zostac wykorzystanych na wykresie
# pacjenci = []
for x in myresult:
# pacjenci.append(str(x[0])+" "+str(x[1]))
self.nameLineEdit.setText(str(x[0]))
self.surnameLineEdit.setText(str(x[1]))
# self.sexLineEdit.setText(str(x[2]))
self.sexComboBox.clear()
self.sexComboBox.addItem(str(x[2]))
if self.sexComboBox.currentText()[0]=="M":
self.sexComboBox.addItem("Kobieta")
else:
self.sexComboBox.addItem("Mezczyzna")
self.birthDateLineEdit.setText(str(x[3]))
self.peselLineEdit.setText(str(x[4]))
self.phoneLineEdit.setText(str(x[5]))
self.emailLineEdit.setText(str(x[6]))
self.cityCodeLineEdit.setText(str(x[7]))
self.cityLineEdit.setText(str(x[8]))
self.streetLineEdit.setText(str(x[9]))
self.alarmValueLineEdit.setText(str(x[10]))
###################################################################
except:
pass
# print("SELECT query failed")
def pushButtonAbortClicked(self):
edit_patient_window.hide()
worker = Worker()
self.threadpool.start(worker)
def pushButtonSaveChangesClicked(self):
noweImie = self.nameLineEdit.text()
noweNazwisko = self.surnameLineEdit.text()
# nowaPlec = self.sexLineEdit.text()
nowaPlec = self.sexComboBox.currentText()
nowaData_urodzenia = self.birthDateLineEdit.text()
nowyPESEL = self.peselLineEdit.text()
nowyTelefon = self.phoneLineEdit.text()
nowyEmail = self.emailLineEdit.text()
nowyKod_pocztowy = self.cityCodeLineEdit.text()
nowaMiejscowosc = self.cityLineEdit.text()
nowaUlica = self.streetLineEdit.text()
nowaWartoscAlarmowa = self.alarmValueLineEdit.text()
worker = Worker()
self.threadpool.start(worker)
#Writing Query to insert data
# Przekazanie, ktora osoba ma zostac edytowana do buttona potwierdzajacego i wykonujacego UPDATE
# Pobranie tych danych z aktualnego ComboBoxa
wybrany_pacjent = self.patientToEditComboBox.currentText()
try:
wybrany_pacjent = wybrany_pacjent.split()
wybrane_imie = wybrany_pacjent[0]
wybrane_nazwisko = wybrany_pacjent[1]
except:
pass
query = ("UPDATE pacjenci SET imie=\'{imie2}\', nazwisko=\'{nazwisko2}\', plec=\'{plec2}\', data_urodzenia=\'{data_urodzenia2}\', PESEL=\'{PESEL2}\',\
telefon=\'{telefon2}\', email=\'{email2}\', kod_pocztowy=\'{kod_pocztowy2}\', miejscowosc=\'{miejscowosc2}\', ulica=\'{ulica2}\', wartosc_alarmowa=\'{wartosc_alarmowa2}\' WHERE imie LIKE\
\'{jakie_imie}\' AND nazwisko LIKE '\{jakie_nazwisko}\'".format(imie2=noweImie,nazwisko2=noweNazwisko,plec2=nowaPlec,\
data_urodzenia2=nowaData_urodzenia,PESEL2=nowyPESEL,telefon2=nowyTelefon,email2=nowyEmail,kod_pocztowy2=nowyKod_pocztowy,\
miejscowosc2=nowaMiejscowosc,ulica2=nowaUlica,wartosc_alarmowa2=nowaWartoscAlarmowa,jakie_imie=wybrane_imie,jakie_nazwisko=wybrane_nazwisko))
# taxi = (imie, nazwisko, plec, data_urodzenia, PESEL ,telefon, email, kod_pocztowy, miejscowosc, ulica) # zamiast jedynki mozna wrzucic zmienna pobraną z pola EditText (trzeba takie dodać) gdzie uzytkownik wpisze numer czujnika z palca LUB jego ID
try:
cursor.execute(query) #Execute the Query
cnx.commit()
# print("Zmieniono dane pacjenta.")
# Czyszczenie wprowadzonego tekstu
self.nameLineEdit.setText("")
self.surnameLineEdit.setText("")
# self.sexLineEdit.setText("")
self.sexComboBox.clear()
self.birthDateLineEdit.setText("")
self.peselLineEdit.setText("")
self.phoneLineEdit.setText("")
self.emailLineEdit.setText("")
self.cityCodeLineEdit.setText("")
self.cityLineEdit.setText("")
self.streetLineEdit.setText("")
self.alarmValueLineEdit.setText("")
notification_win.label.setText("Zmieniono dane pacjenta.")
notification_win.show()
# TODO # zarejestrowac ta akcje w logach zdarzen
###################### #log #rejestr #zdarzenie ########################################################################################
cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE \"{jaki_login}\"".format(jaki_login=window.current_user))
ID_pracownika = cursor.fetchall()[0][0]
# # print("Wyswietlanie ID pracownika na podstawie loginu...")
# # print(ID_pracownika)
query = ("INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)")
taxi = (ID_pracownika, "Zmiana danych pacjenta {jakie_imie} {jakie_nazwisko}".format(jakie_imie=wybrane_imie,jakie_nazwisko=wybrane_nazwisko), "")
cursor.execute(query, taxi)
cnx.commit()
window.eventList.insertItem(0, "Zmiana danych pacjenta {jakie_imie} {jakie_nazwisko}, ".format(jakie_imie=wybrane_imie,jakie_nazwisko=wybrane_nazwisko)+str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
########################################################################################################################################
except:
notification_win.label.setText("Niepoprawne dane. Zwróć uwagę, czy data urodzenia oraz email mają poprawny format.")
notification_win.show()
cnx.rollback()
def pushButtonDeletePatientClicked(self):
worker = Worker()
self.threadpool.start(worker)
#Writing Query to insert data
# Przekazanie, ktora osoba ma zostac edytowana do buttona potwierdzajacego i wykonujacego UPDATE
# Pobranie tych danych z aktualnego ComboBoxa
### Czekanie na potwierdzenie ...
# TODO pytanie o potwierdzenie skasowania pacjenta
# w zamysle po potwierdzeniu usuniecia w oknie delete_confirm_window powinno sie zamknac to okno i kontynuowac operacje ponizej czyli usuniecie pacjenta
confirmed = 1
# delete_confirm_window.show()
qm = QMessageBox
ret = qm.question(self,'', "Czy na pewno chcesz usunąć tego pacjenta?", qm.Yes | qm.No)
if ret == qm.Yes:
try:
wybrany_pacjent = self.patientToEditComboBox.currentText()
wybrany_pacjent = wybrany_pacjent.split()
wybrane_imie = wybrany_pacjent[0]
wybrane_nazwisko = wybrany_pacjent[1]
query = ("DELETE FROM pacjenci WHERE imie LIKE \'{jakie_imie}\' AND nazwisko LIKE '\{jakie_nazwisko}\'".\
format(jakie_imie=wybrane_imie,jakie_nazwisko=wybrane_nazwisko))
# taxi = (imie, nazwisko, plec, data_urodzenia, PESEL ,telefon, email, kod_pocztowy, miejscowosc, ulica) # zamiast jedynki mozna wrzucic zmienna pobraną z pola EditText (trzeba takie dodać) gdzie uzytkownik wpisze numer czujnika z palca LUB jego ID
cursor.execute(query) #Execute the Query
cnx.commit()
# print("Usunieto pacjenta {jakie_imie} {jakie_nazwisko}.".format(jakie_imie=wybrane_imie,jakie_nazwisko=wybrane_nazwisko))
# Czyszczenie wprowadzonego tekstu
self.nameLineEdit.setText("")
self.surnameLineEdit.setText("")
# self.sexLineEdit.setText("")
self.sexComboBox.clear()
self.birthDateLineEdit.setText("")
self.peselLineEdit.setText("")
self.phoneLineEdit.setText("")
self.emailLineEdit.setText("")
self.cityCodeLineEdit.setText("")
self.cityLineEdit.setText("")
self.streetLineEdit.setText("")
self.alarmValueLineEdit.setText("")
###################### #log #rejestr #zdarzenie ########################################################################################
cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE \"{jaki_login}\"".format(jaki_login=window.current_user))
ID_pracownika = cursor.fetchall()[0][0]
# # print("Wyswietlanie ID pracownika na podstawie loginu...")
# # print(ID_pracownika)
query = ("INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)")
taxi = (ID_pracownika, "Usuniecie pacjenta {jakie_imie} {jakie_nazwisko}".format(jakie_imie=wybrane_imie,jakie_nazwisko=wybrane_nazwisko), "")
cursor.execute(query, taxi)
cnx.commit()
window.eventList.insertItem(0, "Usuniecie pacjenta {jakie_imie} {jakie_nazwisko}, ".format(jakie_imie=wybrane_imie,jakie_nazwisko=wybrane_nazwisko)+str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')))
########################################################################################################################################
notification_win.label.setText("Usunieto pacjenta {jakie_imie} {jakie_nazwisko}.".format(jakie_imie=wybrane_imie,jakie_nazwisko=wybrane_nazwisko))
notification_win.show()
# TODO # zarejestrowac ta akcje w logach zdarzen
except:
notification_win.label.setText("Wystapil problem podczas usuwania pacjenta. Sprawdz czy pacjent zostal wybrany.")
notification_win.show()
cnx.rollback()
pass
class new_sensor(QMainWindow):
    """Dialog for registering a new finger sensor.

    Offers two insert paths: with an explicit sensor ID, or letting the
    database assign the default (auto-increment) ID. Both paths validate the
    MAC (12 hex digits, per the placeholder "AABBCCDDEEFF") and log the
    action in ``rejestr_zdarzen``.
    """

    def __init__(self):
        QMainWindow.__init__(self)
        loadUi('add_sensor_gui.ui', self)
        self.setWindowTitle("Dodawanie nowego czujnika")
        self.pushButtonAdd.clicked.connect(self.pushButtonAddClicked)
        self.pushButtonAddDefaultID.clicked.connect(self.pushButtonAddDefaultIDClicked)
        self.pushButtonAbort.clicked.connect(self.pushButtonAbortClicked)
        self.macLineEdit.setPlaceholderText("AABBCCDDEEFF")
        self.threadpool = QThreadPool()

    @staticmethod
    def _mac_valid(mac):
        # A MAC is entered as 12 hex digits, e.g. "AABBCCDDEEFF"
        # (validation requested by the original TODO: enforce 0-9, A-F).
        return len(mac) == 12 and all(c in "0123456789abcdefABCDEF" for c in mac)

    def _log_sensor_added(self, mac_address):
        # Record the action in the event register and on the main window's event list.
        cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE %s",
                       (window.current_user,))
        ID_pracownika = cursor.fetchall()[0][0]
        cursor.execute(
            "INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
            (ID_pracownika, "Dodano czujnik, MAC: {jaki_mac}".format(jaki_mac=mac_address), ""))
        cnx.commit()
        window.eventList.insertItem(
            0,
            "Dodano czujnik, MAC: {jaki_mac}, ".format(jaki_mac=mac_address)
            + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))

    def pushButtonAbortClicked(self):
        """Close this dialog without saving and start a background worker."""
        # BUGFIX: the original hid new_patient_window (copy-paste from the
        # patient dialog); this dialog's window object is new_sensor_window,
        # as used by the add handlers below.
        new_sensor_window.hide()
        worker = Worker()
        self.threadpool.start(worker)

    def pushButtonAddDefaultIDClicked(self):
        """Insert a sensor identified only by its MAC; the DB assigns the ID."""
        worker = Worker()
        self.threadpool.start(worker)
        mac_address = self.macLineEdit.text()
        if not self._mac_valid(mac_address):
            notification_win.label.setText("Niepoprawny adres MAC. Wymagane 12 znaków 0-9, A-F.")
            notification_win.show()
            return
        try:
            # Parameterized INSERT -- avoids SQL injection via the MAC field.
            cursor.execute("INSERT INTO czujniki (MAC_czujnika) VALUES (%s)", (mac_address,))
            cnx.commit()
            self.macLineEdit.setText("")
            self.sensorIDLineEdit.setText("")
            notification_win.label.setText("Dodano nowy czujnik.")
            notification_win.show()
            self._log_sensor_added(mac_address)
            new_sensor_window.hide()
        except Exception:
            # Message corrected per the original TODO (it previously talked
            # about birth date / email, copied from the patient dialog).
            notification_win.label.setText("Nie udało się dodać czujnika.")
            notification_win.show()
            cnx.rollback()

    def pushButtonAddClicked(self):
        """Insert a sensor with a user-supplied ID and MAC."""
        worker = Worker()
        self.threadpool.start(worker)
        mac_address = self.macLineEdit.text()
        sensor_id = self.sensorIDLineEdit.text()
        if not self._mac_valid(mac_address):
            notification_win.label.setText("Niepoprawny adres MAC. Wymagane 12 znaków 0-9, A-F.")
            notification_win.show()
            return
        try:
            cursor.execute("INSERT INTO czujniki (ID_czujnika, MAC_czujnika) VALUES (%s, %s)",
                           (sensor_id, mac_address))
            cnx.commit()
            self.macLineEdit.setText("")
            self.sensorIDLineEdit.setText("")
            notification_win.label.setText("Dodano nowy czujnik.")
            notification_win.show()
            self._log_sensor_added(mac_address)
            new_sensor_window.hide()
        except Exception:
            notification_win.label.setText("Nie udało się dodać czujnika.\nPodane ID czujnika może już istnieć w bazie danych.")
            notification_win.show()
            cnx.rollback()
class edit_sensor(QMainWindow):
    """Dialog for editing a sensor's ID/MAC or deleting a sensor.

    The sensor is chosen via a filterable combo box showing
    "<ID> <MAC> <patient name> <patient surname>" (patient columns are '-'
    for unassigned sensors). All queries are parameterized; the original
    used string-formatted SQL, which was injectable via the filter/edit
    fields.
    """

    def __init__(self):
        QMainWindow.__init__(self)
        loadUi('edit_sensor_gui.ui', self)
        self.setWindowTitle("Edytowanie danych czujnika")
        self.pushButtonSaveChanges.clicked.connect(self.pushButtonSaveChangesClicked)
        self.pushButtonAbort.clicked.connect(self.pushButtonAbortClicked)
        self.macLineEdit.setPlaceholderText("AABBCCDDEEFF")
        self.pushButtonFilter.clicked.connect(self.pushButtonFilterClicked)
        self.pushButtonLoad.clicked.connect(self.pushButtonLoadClicked)
        self.pushButtonDelete.clicked.connect(self.pushButtonDeleteClicked)
        sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum)
        self.pushButtonDelete.setSizePolicy(sizePolicy)
        self.threadpool = QThreadPool()
        # MAC as loaded into the form; reported in the event log on save.
        self.previous_mac = "Default mac string"

    def _selected_sensor_id(self):
        # First token of the combo text is the sensor ID; None when nothing selected.
        parts = self.chooseToEditComboBox.currentText().split()
        return parts[0] if parts else None

    def pushButtonFilterClicked(self):
        """Fill the sensor combo with sensors matching the filter text.

        LEFT JOINs keep sensors that are not assigned to any patient visible;
        the patient name shown next to ID/MAC tells the user whether the
        sensor is "free".
        """
        worker = Worker()
        self.threadpool.start(worker)
        self.chooseToEditComboBox.clear()
        pattern = "%{0}%".format(self.filterToEditLineEdit.text())
        try:
            cursor.execute(
                "SELECT cz.ID_czujnika, cz.MAC_czujnika, IFNULL(pac.imie,'-'), IFNULL(pac.nazwisko,'-') "
                "FROM czujniki cz "
                "LEFT JOIN przydzial_czujnikow prz ON cz.ID_czujnika=prz.ID_czujnika "
                "LEFT JOIN pacjenci pac ON prz.ID_pacjenta=pac.ID_pacjenta "
                "WHERE cz.ID_czujnika LIKE %s OR cz.MAC_czujnika LIKE %s "
                "OR pac.imie LIKE %s OR pac.nazwisko LIKE %s",
                (pattern, pattern, pattern, pattern))
            czujniki = [" ".join(str(col) for col in row) for row in cursor.fetchall()]
            self.chooseToEditComboBox.addItems(czujniki)
        except Exception:
            pass  # best-effort: a failed lookup simply leaves the combo box empty

    def pushButtonLoadClicked(self):
        """Load the selected sensor's ID and MAC into the edit fields."""
        worker = Worker()
        self.threadpool.start(worker)
        wybrane_id = self._selected_sensor_id()
        if wybrane_id is not None:
            try:
                cursor.execute("SELECT ID_czujnika, MAC_czujnika FROM czujniki WHERE ID_czujnika=%s",
                               (wybrane_id,))
                for row in cursor.fetchall():
                    self.idLineEdit.setText(str(row[0]))
                    self.macLineEdit.setText(str(row[1]))
            except Exception:
                pass  # leave the fields as they are if the lookup fails
        # Remember the loaded MAC so the save handler can log the change.
        self.previous_mac = self.macLineEdit.text()

    def pushButtonAbortClicked(self):
        """Close without saving and start a background worker."""
        # NOTE(review): hides edit_patient_window, exactly as the original did.
        # This looks like a copy-paste from the patient dialog -- confirm which
        # window object belongs to this dialog before changing it.
        edit_patient_window.hide()
        worker = Worker()
        self.threadpool.start(worker)

    def pushButtonSaveChangesClicked(self):
        """Write the edited ID/MAC back to the database and log the change."""
        worker = Worker()
        self.threadpool.start(worker)
        noweID = self.idLineEdit.text()
        nowyMAC = self.macLineEdit.text()
        wybrane_id = self._selected_sensor_id()
        try:
            cursor.execute("UPDATE czujniki SET ID_czujnika=%s, MAC_czujnika=%s WHERE ID_czujnika=%s",
                           (noweID, nowyMAC, wybrane_id))
            cnx.commit()
            self.idLineEdit.setText("")
            self.macLineEdit.setText("")
            self.pushButtonFilterClicked()
            notification_win.label.setText("Zmieniono dane czujnika.")
            notification_win.show()
            # Event log: record the MAC change under the current user.
            cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE %s",
                           (window.current_user,))
            ID_pracownika = cursor.fetchall()[0][0]
            cursor.execute(
                "INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
                (ID_pracownika,
                 "Zmiana MAC czujnika z {stary_mac} na {jaki_mac}".format(
                     stary_mac=self.previous_mac, jaki_mac=nowyMAC),
                 ""))
            cnx.commit()
            window.eventList.insertItem(
                0,
                "Zmiana MAC czujnika z {stary_mac} na {jaki_mac}, ".format(
                    stary_mac=self.previous_mac, jaki_mac=nowyMAC)
                + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        except Exception:
            # NOTE(review): message mentions birth date/email -- copied from the
            # patient dialog; kept byte-identical to avoid changing UI text here.
            notification_win.label.setText("Niepoprawne dane. Zwróć uwagę, czy data urodzenia oraz email mają poprawny format.")
            notification_win.show()
            cnx.rollback()

    def pushButtonDeleteClicked(self):
        """Delete the selected sensor after user confirmation."""
        worker = Worker()
        self.threadpool.start(worker)
        qm = QMessageBox
        ret = qm.question(self, '', "Czy na pewno chcesz usunąć ten czujnik?", qm.Yes | qm.No)
        if ret != qm.Yes:
            return
        try:
            parts = self.chooseToEditComboBox.currentText().split()
            wybrane_id = parts[0]
            mac_usuwanego_czujnika = parts[1]
            cursor.execute("DELETE FROM czujniki WHERE ID_czujnika=%s", (int(wybrane_id),))
            cnx.commit()
            self.idLineEdit.setText("")
            self.macLineEdit.setText("")
            self.pushButtonFilterClicked()
            notification_win.label.setText("Usunieto czujnik z bazy danych. MAC: {jaki_mac}.".format(
                jaki_mac=mac_usuwanego_czujnika))
            notification_win.show()
            # Event log: record the deletion under the current user.
            cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE %s",
                           (window.current_user,))
            ID_pracownika = cursor.fetchall()[0][0]
            cursor.execute(
                "INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
                (ID_pracownika, "Usunieto czujnik, MAC: {jaki_mac}".format(jaki_mac=mac_usuwanego_czujnika), ""))
            cnx.commit()
            window.eventList.insertItem(
                0,
                "Usunieto czujnik, MAC: {jaki_mac}, ".format(jaki_mac=mac_usuwanego_czujnika)
                + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        except Exception:
            notification_win.label.setText("Wystapil problem podczas usuwania czujnika. Sprawdz czy pacjent zostal wybrany.")
            notification_win.show()
            cnx.rollback()
class assign_sensor(QMainWindow):
    """Dialog for assigning sensors to patients and removing assignments.

    Two filterable combo boxes select a sensor ("<ID> <MAC> <name> <surname>")
    and a patient ("<ID> <name> <surname>"). All queries are parameterized;
    the original used string-formatted SQL, which was injectable via the
    filter fields.
    """

    def __init__(self):
        QMainWindow.__init__(self)
        loadUi('assign_sensor_gui.ui', self)
        self.setWindowTitle("Zmiana przypisania czujnikow")
        self.pushButtonAbort.clicked.connect(self.pushButtonAbortClicked)
        self.pushButtonFilter.clicked.connect(self.pushButtonFilterClicked)
        self.pushButtonAssign.clicked.connect(self.pushButtonAssignClicked)
        self.pushButtonFilterEditPatient.clicked.connect(self.pushButtonFilterEditPatientClicked)
        self.pushButtonDelete.clicked.connect(self.pushButtonDeleteClicked)
        # TODO: window scaling is unfinished.
        sizePolicy = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum)
        self.pushButtonDelete.setSizePolicy(sizePolicy)
        self.threadpool = QThreadPool()
        # Placeholder details of the last assignment (kept from the original).
        self.MAC_assigned = "Replace with ID..."
        self.assigned_to_name = "Replace with name"
        self.assigned_to_surname = "Replace with surname"

    def pushButtonFilterClicked(self):
        """Fill the sensor combo with sensors matching the filter text.

        LEFT JOINs keep unassigned sensors visible; the patient name shown
        next to ID/MAC tells the user whether a sensor is "free". LIKE BINARY
        keeps the match case-sensitive (drop BINARY for case-insensitive).
        """
        self.chooseToEditComboBox.clear()
        worker = Worker()
        self.threadpool.start(worker)
        pattern = "%{0}%".format(self.filterToEditLineEdit.text())
        try:
            cursor.execute(
                "SELECT cz.ID_czujnika, cz.MAC_czujnika, IFNULL(pac.imie,'-'), IFNULL(pac.nazwisko,'-') "
                "FROM czujniki cz "
                "LEFT JOIN przydzial_czujnikow prz ON cz.ID_czujnika=prz.ID_czujnika "
                "LEFT JOIN pacjenci pac ON prz.ID_pacjenta=pac.ID_pacjenta "
                "WHERE cz.ID_czujnika LIKE BINARY %s OR cz.MAC_czujnika LIKE BINARY %s "
                "OR pac.imie LIKE BINARY %s OR pac.nazwisko LIKE BINARY %s",
                (pattern, pattern, pattern, pattern))
            czujniki = [" ".join(str(col) for col in row) for row in cursor.fetchall()]
            self.chooseToEditComboBox.addItems(czujniki)
        except Exception:
            pass  # best-effort: failure leaves the combo box empty

    def pushButtonFilterEditPatientClicked(self):
        """Fill the patient combo with patients matching the filter text."""
        self.patientToEditComboBox.clear()
        worker = Worker()
        self.threadpool.start(worker)
        pattern = "%{0}%".format(self.filterPatientLineEdit.text())
        try:
            cursor.execute(
                "SELECT ID_pacjenta, imie, nazwisko FROM pacjenci "
                "WHERE imie LIKE BINARY %s OR nazwisko LIKE BINARY %s OR ID_pacjenta LIKE BINARY %s",
                (pattern, pattern, pattern))
            pacjenci = [" ".join(str(col) for col in row) for row in cursor.fetchall()]
            self.patientToEditComboBox.addItems(pacjenci)
        except Exception:
            pass

    def pushButtonAbortClicked(self):
        """Hide this dialog and start a background worker."""
        assign_sensor_window.hide()
        worker = Worker()
        self.threadpool.start(worker)

    def pushButtonAssignClicked(self):
        """Assign the selected sensor to the selected patient and log it."""
        worker = Worker()
        self.threadpool.start(worker)
        # Remember current indexes so both combos can be restored after refresh.
        id_of_assigned = self.chooseToEditComboBox.currentIndex()
        id_of_chosen_patient = self.patientToEditComboBox.currentIndex()
        wybrany_czujnik = self.chooseToEditComboBox.currentText().split()
        wybrany_pacjent = self.patientToEditComboBox.currentText().split()
        try:
            wybrane_id = wybrany_czujnik[0]
            MAC_assigned = wybrany_czujnik[1]
            wybrane_id_pacjenta = wybrany_pacjent[0]
            wybrane_imie = wybrany_pacjent[1]
            wybrane_nazwisko = wybrany_pacjent[2]
        except IndexError:
            # Incomplete selection -- the original fell through to an
            # unhandled NameError here; abort cleanly instead.
            return
        try:
            cursor.execute(
                "INSERT INTO przydzial_czujnikow (ID_pacjenta,ID_czujnika,status) VALUES (%s, %s, 'default')",
                (wybrane_id_pacjenta, wybrane_id))
            cnx.commit()
            # Clear filters, refresh both combos, restore the selections.
            self.filterToEditLineEdit.setText("")
            self.filterPatientLineEdit.setText("")
            self.pushButtonFilterClicked()
            self.chooseToEditComboBox.setCurrentIndex(id_of_assigned)
            self.pushButtonFilterEditPatientClicked()
            self.patientToEditComboBox.setCurrentIndex(id_of_chosen_patient)
            notification_win.label.setText("Dodano nowe przypisanie.")
            notification_win.show()
            # Event log: record the assignment under the current user.
            cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE %s",
                           (window.current_user,))
            ID_pracownika = cursor.fetchall()[0][0]
            opis = "Przypisano czujnik, MAC: {jaki_mac} pacjentowi {jakie_imie} {jakie_nazwisko}, ".format(
                jaki_mac=MAC_assigned, jakie_imie=wybrane_imie, jakie_nazwisko=wybrane_nazwisko)
            cursor.execute(
                "INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
                (ID_pracownika, opis, ""))
            cnx.commit()
            window.eventList.insertItem(0, opis + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        except Exception:
            notification_win.label.setText("Nie udalo się dodać przypisania.\nWybrany czujnik może już być przypisany do innego pacjenta.\n\nUsuń przypisanie i spróbuj ponownie.")
            notification_win.show()
            cnx.rollback()

    def pushButtonDeleteClicked(self):
        """Remove the selected sensor's assignment after confirmation."""
        worker = Worker()
        self.threadpool.start(worker)
        qm = QMessageBox
        ret = qm.question(self, '', "Czy na pewno chcesz usunąć to przypisanie?", qm.Yes | qm.No)
        if ret != qm.Yes:
            return
        try:
            wybrany_czujnik = self.chooseToEditComboBox.currentText().split()
            wybrany_pacjent = self.patientToEditComboBox.currentText().split()
            wybrane_id = wybrany_czujnik[0]
            id_of_deleted = self.chooseToEditComboBox.currentIndex()
            id_of_chosen_patient = self.patientToEditComboBox.currentIndex()
            cursor.execute("DELETE FROM przydzial_czujnikow WHERE ID_czujnika=%s",
                           (int(wybrane_id),))
            cnx.commit()
            # Clear filters, refresh both combos, restore the selections.
            self.filterToEditLineEdit.setText("")
            self.filterPatientLineEdit.setText("")
            self.pushButtonFilterClicked()
            self.chooseToEditComboBox.setCurrentIndex(id_of_deleted)
            self.pushButtonFilterEditPatientClicked()
            self.patientToEditComboBox.setCurrentIndex(id_of_chosen_patient)
            notification_win.label.setText("Usunieto przypisanie z bazy.")
            notification_win.show()
            # Event log: record the removal under the current user.
            MAC_assigned = wybrany_czujnik[1]
            wybrane_imie = wybrany_pacjent[1]
            wybrane_nazwisko = wybrany_pacjent[2]
            cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE %s",
                           (window.current_user,))
            ID_pracownika = cursor.fetchall()[0][0]
            opis = "Usunieto przypisanie czujnika, MAC: {jaki_mac} , pacjent: {jakie_imie} {jakie_nazwisko}, ".format(
                jaki_mac=MAC_assigned, jakie_imie=wybrane_imie, jakie_nazwisko=wybrane_nazwisko)
            cursor.execute(
                "INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
                (ID_pracownika, opis, ""))
            cnx.commit()
            window.eventList.insertItem(0, opis + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        except Exception:
            notification_win.label.setText("Wystapil problem podczas usuwania przypisania. Sprawdz czy pacjent zostal wybrany.")
            notification_win.show()
            cnx.rollback()
class new_user(QMainWindow):
    """Window for adding a new employee (a row in the `personel` table)."""

    def __init__(self):
        QMainWindow.__init__(self)
        loadUi('add_user_gui.ui', self)
        self.setWindowTitle("Dodawanie nowego pracownika")
        self.pushButtonAdd.clicked.connect(self.pushButtonAddClicked)
        self.pushButtonAbort.clicked.connect(self.pushButtonAbortClicked)
        self.birthDateLineEdit.setPlaceholderText("RRRR-MM-DD")
        self.hireDateLineEdit.setPlaceholderText("RRRR-MM-DD")
        self.emailLineEdit.setPlaceholderText("email@address.com")
        self.passwordLineEdit.setEchoMode(QtWidgets.QLineEdit.Password)
        self.sexComboBox.addItem("Mężczyzna")
        self.sexComboBox.addItem("Kobieta")
        # TODO: require a login of at least 5 characters and a password of 8+
        # characters mixing A-Z, a-z and 0-9.
        self.threadpool = QThreadPool()

    def pushButtonAbortClicked(self):
        """Close this window without saving."""
        # BUG FIX: the original hid new_patient_window (a different window);
        # hide this "add user" window instead.
        self.hide()
        worker = Worker()
        self.threadpool.start(worker)

    def pushButtonAddClicked(self):
        """Read the form fields, insert the new employee and log the event."""
        imie = self.nameLineEdit.text()
        nazwisko = self.surnameLineEdit.text()
        plec = self.sexComboBox.currentText()
        data_urodzenia = self.birthDateLineEdit.text()
        PESEL = self.peselLineEdit.text()
        data_zatrudnienia = self.hireDateLineEdit.text()
        login = self.loginLineEdit.text()
        zaszyfrowane_haslo = encrypt_string(self.passwordLineEdit.text())  # only the hash is stored
        telefon = self.phoneLineEdit.text()  # BUG FIX: was read from nameLineEdit
        email = self.emailLineEdit.text()
        kod_pocztowy = self.cityCodeLineEdit.text()
        miejscowosc = self.cityLineEdit.text()
        ulica = self.streetLineEdit.text()
        worker = Worker()
        self.threadpool.start(worker)
        query = ("INSERT INTO personel (imie, nazwisko, plec, data_urodzenia, PESEL, data_zatrudnienia, login, zaszyfrowane_haslo, telefon, email, kod_pocztowy, miejscowosc, ulica) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
        values = (imie, nazwisko, plec, data_urodzenia, PESEL, data_zatrudnienia, login,
                  zaszyfrowane_haslo, telefon, email, kod_pocztowy, miejscowosc, ulica)
        try:
            cursor.execute(query, values)
            cnx.commit()
            # Clear the form for the next entry.
            self.nameLineEdit.setText("")
            self.surnameLineEdit.setText("")
            self.sexComboBox.clear()
            self.sexComboBox.addItem("Mężczyzna")
            self.sexComboBox.addItem("Kobieta")
            self.birthDateLineEdit.setText("")
            self.peselLineEdit.setText("")
            self.hireDateLineEdit.setText("")
            self.loginLineEdit.setText("")
            self.passwordLineEdit.setText("")
            self.phoneLineEdit.setText("")
            self.emailLineEdit.setText("")
            self.cityCodeLineEdit.setText("")
            self.cityLineEdit.setText("")
            self.streetLineEdit.setText("")
            notification_win.label.setText("Dodano nowego pracownika.")
            notification_win.show()
            # Event log: record who performed the addition.
            # SECURITY FIX: parameterized query instead of str.format.
            cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE %s",
                           (window.current_user,))
            ID_pracownika = cursor.fetchall()[0][0]
            cursor.execute(
                "INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
                (ID_pracownika,
                 "Dodano pracownika {jakie_imie} {jakie_nazwisko}, ".format(jakie_imie=imie, jakie_nazwisko=nazwisko),
                 ""))
            cnx.commit()
            window.eventList.insertItem(0, "Dodano pracownika {jakie_imie} {jakie_nazwisko}, ".format(jakie_imie=imie, jakie_nazwisko=nazwisko) + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
            new_user_window.hide()
        except Exception:
            # ROBUSTNESS FIX: the original rolled back silently; tell the user.
            notification_win.label.setText("Niepoprawne dane. Zwróć uwagę, czy data urodzenia oraz email mają poprawny format.")
            notification_win.show()
            cnx.rollback()
class edit_user(QMainWindow):
    """Window for editing an employee's data, changing a password and deleting an employee."""

    def __init__(self):
        QMainWindow.__init__(self)
        loadUi('edit_user_gui.ui', self)
        self.setWindowTitle("Edycja danych pracownika")
        self.pushButtonSaveChanges.clicked.connect(self.pushButtonSaveChangesClicked)
        self.pushButtonAbort.clicked.connect(self.pushButtonAbortClicked)
        self.birthDateLineEdit.setPlaceholderText("RRRR-MM-DD")
        self.hireDateLineEdit.setPlaceholderText("RRRR-MM-DD")
        self.emailLineEdit.setPlaceholderText("email@address.com")
        self.pushButtonFilterEditUser.clicked.connect(self.pushButtonFilterEditUserClicked)
        self.pushButtonLoadToEditUser.clicked.connect(self.pushButtonLoadToEditUserClicked)
        self.pushButtonDeleteUser.clicked.connect(self.pushButtonDeleteUserClicked)
        self.pushButtonChangePass.clicked.connect(self.pushButtonChangePassClicked)
        self.oldPassLineEdit.setEchoMode(QtWidgets.QLineEdit.Password)
        self.newPassLineEdit.setEchoMode(QtWidgets.QLineEdit.Password)
        self.newPassRepeatLineEdit.setEchoMode(QtWidgets.QLineEdit.Password)
        self.sexComboBox.addItem("Mężczyzna")
        self.sexComboBox.addItem("Kobieta")
        self.threadpool = QThreadPool()

    def pushButtonChangePassClicked(self):
        """Change the password of the employee whose login is typed in.

        The hash of the old password must match the stored hash and both
        new-password fields must agree; the change is written to the event log.
        """
        self.threadpool.start(Worker())
        oldPass = encrypt_string(self.oldPassLineEdit.text())
        newPass = encrypt_string(self.newPassLineEdit.text())
        newPassRepeat = encrypt_string(self.newPassRepeatLineEdit.text())
        login = self.loginLineEdit.text()
        # SECURITY FIX: parameterized query — the login is typed by the user.
        cursor.execute("SELECT zaszyfrowane_haslo FROM personel WHERE login LIKE %s", (login,))
        stored_hash = cursor.fetchall()[0][0]
        if stored_hash == oldPass:
            if newPass == newPassRepeat:
                try:
                    cursor.execute("UPDATE personel SET zaszyfrowane_haslo = %s WHERE login LIKE %s",
                                   (newPass, login))
                    self.loginLineEdit.setText("")
                    self.oldPassLineEdit.setText("")
                    self.newPassLineEdit.setText("")
                    self.newPassRepeatLineEdit.setText("")
                    notification_win.label.setText("Haslo zostalo zmienione.")
                    notification_win.show()
                    # Event log: who made the change and whose password it was.
                    cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE %s",
                                   (window.current_user,))
                    ID_pracownika = cursor.fetchall()[0][0]
                    cursor.execute("SELECT imie, nazwisko FROM personel WHERE login LIKE %s", (login,))
                    imie_i_nazwisko = cursor.fetchall()
                    wybrane_imie = imie_i_nazwisko[0][0]
                    wybrane_nazwisko = imie_i_nazwisko[0][1]
                    cursor.execute(
                        "INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
                        (ID_pracownika,
                         "Zmieniono haslo pracownika {jakie_imie} {jakie_nazwisko}".format(jakie_imie=wybrane_imie, jakie_nazwisko=wybrane_nazwisko),
                         ""))
                    cnx.commit()  # commits the password UPDATE and the log entry together
                    window.eventList.insertItem(0, "Zmieniono haslo pracownika {jakie_imie} {jakie_nazwisko}, ".format(jakie_imie=wybrane_imie, jakie_nazwisko=wybrane_nazwisko) + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
                except Exception:
                    notification_win.label.setText("Nie udalo sie zmienic hasla.")
                    notification_win.show()
                    window.eventList.insertItem(0, "Blad podczas proby zmiany hasla, " + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        else:
            notification_win.label.setText("Nieudana proba zmiany hasla.")
            notification_win.show()
            window.eventList.insertItem(0, "Nieudana proba zmiany hasla, " + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))

    def pushButtonFilterEditUserClicked(self):
        """Fill the employee combo box with employees matching the filter text."""
        self.userToEditComboBox.clear()
        self.threadpool.start(Worker())
        seekToEdit = self.filterToEditLineEdit.text()
        try:
            pattern = "%{}%".format(seekToEdit)
            # SECURITY FIX: parameterized query instead of str.format.
            # Drop the BINARY keyword if a case-insensitive match is wanted.
            cursor.execute(
                "SELECT imie, nazwisko FROM personel WHERE imie LIKE BINARY %s "
                "OR nazwisko LIKE BINARY %s OR ID_pracownika LIKE BINARY %s",
                (pattern, pattern, pattern))
            pracownicy = [str(row[0]) + " " + str(row[1]) for row in cursor.fetchall()]
            self.userToEditComboBox.addItems(pracownicy)
        except Exception:
            pass

    def pushButtonLoadToEditUserClicked(self):
        """Load the record of the employee selected in the combo box into the form."""
        self.threadpool.start(Worker())
        wybrany_pracownik = self.userToEditComboBox.currentText()
        try:
            wybrany_pracownik = wybrany_pracownik.split()
            wybrane_imie = wybrany_pracownik[0]
            wybrane_nazwisko = wybrany_pracownik[1]
        except Exception:
            pass  # combo box empty — the query below will simply fail
        try:
            # SECURITY FIX: parameterized query instead of str.format.
            cursor.execute(
                "SELECT imie, nazwisko, plec, data_urodzenia, PESEL, data_zatrudnienia, telefon, "
                "email, kod_pocztowy, miejscowosc, ulica FROM personel "
                "WHERE imie LIKE %s AND nazwisko LIKE %s",
                ("%{}%".format(wybrane_imie), "%{}%".format(wybrane_nazwisko)))
            for x in cursor.fetchall():
                self.nameLineEdit.setText(str(x[0]))
                self.surnameLineEdit.setText(str(x[1]))
                # Put the stored sex first so it shows as the current choice,
                # then add the other option.
                self.sexComboBox.clear()
                self.sexComboBox.addItem(str(x[2]))
                if self.sexComboBox.currentText()[0] == "M":
                    self.sexComboBox.addItem("Kobieta")
                else:
                    self.sexComboBox.addItem("Mężczyzna")  # BUG FIX: was "Mezczyzna" (missing diacritic)
                self.birthDateLineEdit.setText(str(x[3]))
                self.peselLineEdit.setText(str(x[4]))
                self.hireDateLineEdit.setText(str(x[5]))
                self.phoneLineEdit.setText(str(x[6]))
                self.emailLineEdit.setText(str(x[7]))
                self.cityCodeLineEdit.setText(str(x[8]))
                self.cityLineEdit.setText(str(x[9]))
                self.streetLineEdit.setText(str(x[10]))
        except Exception:
            pass

    def pushButtonAbortClicked(self):
        """Close this window without saving."""
        # BUG FIX: the original hid edit_patient_window (a different window);
        # hide this "edit user" window instead.
        self.hide()
        self.threadpool.start(Worker())

    def pushButtonSaveChangesClicked(self):
        """Write the edited form fields back to the selected employee's row and log the event."""
        noweImie = self.nameLineEdit.text()
        noweNazwisko = self.surnameLineEdit.text()
        nowaPlec = self.sexComboBox.currentText()
        nowaData_urodzenia = self.birthDateLineEdit.text()
        nowyPESEL = self.peselLineEdit.text()
        nowaData_zatrudnienia = self.hireDateLineEdit.text()
        nowyTelefon = self.phoneLineEdit.text()
        nowyEmail = self.emailLineEdit.text()
        nowyKod_pocztowy = self.cityCodeLineEdit.text()
        nowaMiejscowosc = self.cityLineEdit.text()
        nowaUlica = self.streetLineEdit.text()
        self.threadpool.start(Worker())
        # The target row is identified by the name currently selected in the combo box.
        wybrany_pracownik = self.userToEditComboBox.currentText()
        try:
            wybrany_pracownik = wybrany_pracownik.split()
            wybrane_imie = wybrany_pracownik[0]
            wybrane_nazwisko = wybrany_pracownik[1]
        except Exception:
            pass
        try:
            # SECURITY FIX: parameterized UPDATE instead of str.format — also
            # removes the stray backslash the original format string put into
            # the surname LIKE pattern ('\{jakie_nazwisko}').
            cursor.execute(
                "UPDATE personel SET imie=%s, nazwisko=%s, plec=%s, data_urodzenia=%s, PESEL=%s, "
                "data_zatrudnienia=%s, telefon=%s, email=%s, kod_pocztowy=%s, miejscowosc=%s, ulica=%s "
                "WHERE imie LIKE %s AND nazwisko LIKE %s",
                (noweImie, noweNazwisko, nowaPlec, nowaData_urodzenia, nowyPESEL,
                 nowaData_zatrudnienia, nowyTelefon, nowyEmail, nowyKod_pocztowy,
                 nowaMiejscowosc, nowaUlica, wybrane_imie, wybrane_nazwisko))
            cnx.commit()
            # Clear the form.
            self.nameLineEdit.setText("")
            self.surnameLineEdit.setText("")
            self.sexComboBox.clear()
            self.sexComboBox.addItem("Mężczyzna")
            self.sexComboBox.addItem("Kobieta")
            self.birthDateLineEdit.setText("")
            self.peselLineEdit.setText("")
            self.hireDateLineEdit.setText("")
            self.phoneLineEdit.setText("")
            self.emailLineEdit.setText("")
            self.cityCodeLineEdit.setText("")
            self.cityLineEdit.setText("")
            self.streetLineEdit.setText("")
            notification_win.label.setText("Zmieniono dane pracownika.")
            notification_win.show()
            # Event log.
            cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE %s",
                           (window.current_user,))
            ID_pracownika = cursor.fetchall()[0][0]
            cursor.execute(
                "INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
                (ID_pracownika,
                 "Zmieniono dane pracownika {jakie_imie} {jakie_nazwisko}".format(jakie_imie=wybrane_imie, jakie_nazwisko=wybrane_nazwisko),
                 ""))
            cnx.commit()
            window.eventList.insertItem(0, "Zmieniono dane pracownika {jakie_imie} {jakie_nazwisko}, ".format(jakie_imie=wybrane_imie, jakie_nazwisko=wybrane_nazwisko) + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        except Exception:
            notification_win.label.setText("Niepoprawne dane. Zwróć uwagę, czy data urodzenia, data zatrudnienia oraz email mają poprawny format.")
            notification_win.show()
            cnx.rollback()

    def pushButtonDeleteUserClicked(self):
        """Delete the employee selected in the combo box after a confirmation prompt."""
        self.threadpool.start(Worker())
        # A dedicated connection is opened here (retrying until it succeeds),
        # as in the original implementation.
        result = None
        while result is None:
            try:
                cnx = mysql.connector.connect(user='user', password='userpass',
                                              host='localhost',
                                              database='main_db')
                result = cnx
            except Exception:
                pass
        cursor = cnx.cursor(buffered=True)
        qm = QMessageBox
        ret = qm.question(self, '', "Czy na pewno chcesz usunąć tego pracownika?", qm.Yes | qm.No)
        if ret == qm.Yes:
            try:
                wybrany_pracownik = self.userToEditComboBox.currentText().split()
                wybrane_imie = wybrany_pracownik[0]
                wybrane_nazwisko = wybrany_pracownik[1]
                # SECURITY FIX: parameterized DELETE instead of str.format — also
                # removes the stray backslash the original put before the surname
                # in the LIKE pattern.
                cursor.execute("DELETE FROM personel WHERE imie LIKE %s AND nazwisko LIKE %s",
                               (wybrane_imie, wybrane_nazwisko))
                cnx.commit()
                # Clear the form.
                self.nameLineEdit.setText("")
                self.surnameLineEdit.setText("")
                self.sexComboBox.clear()
                self.sexComboBox.addItem("Mężczyzna")
                self.sexComboBox.addItem("Kobieta")
                self.birthDateLineEdit.setText("")
                self.peselLineEdit.setText("")
                self.hireDateLineEdit.setText("")
                self.phoneLineEdit.setText("")
                self.emailLineEdit.setText("")
                self.cityCodeLineEdit.setText("")
                self.cityLineEdit.setText("")
                self.streetLineEdit.setText("")
                notification_win.label.setText("Usunieto pracownika {jakie_imie} {jakie_nazwisko}, ".format(jakie_imie=wybrane_imie, jakie_nazwisko=wybrane_nazwisko))
                notification_win.show()
                # Event log.
                cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE %s",
                               (window.current_user,))
                ID_pracownika = cursor.fetchall()[0][0]
                cursor.execute(
                    "INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
                    (ID_pracownika,
                     "Usunieto pracownika {jakie_imie} {jakie_nazwisko}".format(jakie_imie=wybrane_imie, jakie_nazwisko=wybrane_nazwisko),
                     ""))
                cnx.commit()
                window.eventList.insertItem(0, "Usunieto pracownika {jakie_imie} {jakie_nazwisko}, ".format(jakie_imie=wybrane_imie, jakie_nazwisko=wybrane_nazwisko) + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
            except Exception:
                notification_win.label.setText("Wystapil problem podczas usuwania pracownika. Sprawdz czy pracownik zostal wybrany.")
                notification_win.show()
                cnx.rollback()
class auth(QMainWindow):
    """Login window; the main application window is shown only after a successful login."""

    def __init__(self):
        QMainWindow.__init__(self)
        loadUi('auth_gui.ui', self)
        self.setWindowTitle("Logowanie do systemu monitoringu")
        self.loginButton.clicked.connect(self.loginButtonClicked)
        self.abortButton.clicked.connect(self.abortButtonClicked)
        self.passwordLineEdit.setEchoMode(QtWidgets.QLineEdit.Password)
        self.threadpool = QThreadPool()

    def loginButtonClicked(self):
        """Check login/password against the `personel` table and log the attempt."""
        worker = Worker()
        self.threadpool.start(worker)
        login = self.loginLineEdit.text()
        password = self.passwordLineEdit.text()
        # Retry the database connection until it succeeds (original behaviour:
        # this loops forever while the server is down, showing a notification).
        cnx = None
        while cnx is None:
            try:
                cnx = mysql.connector.connect(user='user', password='userpass',
                                              host='localhost', database='main_db')
                cursor = cnx.cursor()
            except Exception:
                notification_win.label.setText("Blad polaczenia. Sprawdz czy serwer bazy danych jest uruchomiony.")
                notification_win.show()
        try:
            # SECURITY FIX: parameterized query — login is untrusted user input
            # and was previously interpolated with str.format (SQL injection).
            cursor.execute("SELECT zaszyfrowane_haslo FROM personel WHERE login LIKE %s", (login,))
            stored_hash = cursor.fetchall()[0][0]  # raises IndexError for an unknown login -> except branch
            if stored_hash == encrypt_string(password):
                window.show()
                auth_win.hide()
                window.current_user = login
                # Event log: successful login.
                cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE %s",
                               (window.current_user,))
                ID_pracownika = cursor.fetchall()[0][0]
                cursor.execute(
                    "INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
                    (ID_pracownika, "pomyslne logowanie", ""))
                cnx.commit()
                window.eventList.insertItem(0, "pomyslne logowanie, " + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
        except Exception:
            notification_win.label.setText("Niepoprawny login lub hasło.")
            notification_win.show()
            # Event log: failed attempt (no employee id available).
            cursor.execute("INSERT INTO rejestr_zdarzen (rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s)",
                           ("nieudana proba logowania", ""))
            cnx.commit()
            window.eventList.insertItem(0, "nieudana proba logowania, " + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))

    def abortButtonClicked(self):
        """Close the login window (exits the login screen)."""
        worker = Worker()
        self.threadpool.start(worker)
        auth_win.close()
class notification(QMainWindow):
    """Simple information pop-up with a single OK button that dismisses it."""

    def __init__(self):
        QMainWindow.__init__(self)
        loadUi('notification.ui', self)
        self.setWindowTitle("Informacja")
        self.threadpool = QThreadPool()
        self.pushButtonOK.clicked.connect(self.pushButtonOKClicked)

    def pushButtonOKClicked(self):
        """Hide the pop-up when OK is pressed."""
        self.threadpool.start(Worker())
        self.hide()
class python_to_arduino_msg(QMainWindow):
    """Window for sending a control message back to the Arduino measuring unit.

    The message id is binary-encoded on digital pins 6 (bit0), 5 (bit1), 4 (bit2)
    and the target sensor id on pins 11 (bit0), 10 (bit1), 9 (bit2), 8 (bit3).
    """

    def __init__(self):
        QMainWindow.__init__(self)
        loadUi('komunikat_zwrotny.ui', self)
        self.setWindowTitle("Informacja zwrotna do układu pomiarowego")
        self.pushButtonFilterEditPatient.clicked.connect(self.pushButtonFilterEditPatientClicked)
        self.pushButtonSend.clicked.connect(self.pushButtonSendClicked)
        self.msgComboBox.clear()
        self.msgComboBox.addItem("1 - Zmień tryb transmisji na ciągły")
        self.msgComboBox.addItem("2 - Zmień tryb transmisji na zdarzeniowy")
        self.msgComboBox.addItem("3 - Przerwij wysyłanie pomiarów")
        self.msgComboBox.addItem("4 - Wznów wysyłanie pomiarów")
        self.wybrane_id_czujnika_pacjenta = 0  # sensor id of the last target (int)
        self.threadpool = QThreadPool()

    def pushButtonFilterEditPatientClicked(self):
        """Fill the patient combo box with patients (and their sensor id) matching the filter."""
        self.patientToEditComboBox.clear()
        self.threadpool.start(Worker())
        seekToEdit = self.filterToEditLineEdit.text()
        try:
            pattern = "%{}%".format(seekToEdit)
            # SECURITY FIX: parameterized query instead of str.format.
            # Drop the BINARY keyword if a case-insensitive match is wanted.
            cursor.execute(
                "SELECT pac.imie, pac.nazwisko, prz.ID_czujnika FROM pacjenci pac "
                "JOIN przydzial_czujnikow prz ON pac.ID_pacjenta=prz.ID_pacjenta "
                "WHERE pac.imie LIKE BINARY %s OR pac.nazwisko LIKE BINARY %s "
                "OR pac.ID_pacjenta LIKE BINARY %s",
                (pattern, pattern, pattern))
            pacjenci = [str(row[0]) + " " + str(row[1]) + " czujnik: " + str(row[2])
                        for row in cursor.fetchall()]
            self.patientToEditComboBox.addItems(pacjenci)
        except Exception:
            pass

    def pushButtonSendClicked(self):
        """Encode the selected message and target-sensor id on the Arduino pins."""
        self.threadpool.start(Worker())
        wybrany_komunikat = self.msgComboBox.currentText()
        pelny_komunikat = wybrany_komunikat
        wybrane_id_komunikatu = 0  # ROBUSTNESS FIX: defined even if parsing fails below
        try:
            # BUG FIX: the original kept the id as a str ("1") and compared it
            # with ints, so no message pin was ever set — convert to int.
            wybrane_id_komunikatu = int(wybrany_komunikat.split()[0])
        except Exception:
            pass
        ser.close()
        board = Arduino(port)
        # Encode the message id on pins 6/5/4 (binary, pin 6 = least significant bit).
        if wybrane_id_komunikatu == 1:
            board.digital[6].write(1)
        elif wybrane_id_komunikatu == 2:
            board.digital[5].write(1)
        elif wybrane_id_komunikatu == 3:
            board.digital[6].write(1)
            board.digital[5].write(1)
        elif wybrane_id_komunikatu == 4:  # BUG FIX: was a duplicated "== 3", so message 4 was never encoded
            board.digital[4].write(1)
        # Encode the sensor id of the selected patient.
        wybrany_pacjent = self.patientToEditComboBox.currentText()
        try:
            wybrany_pacjent = wybrany_pacjent.split()
            jaki_pacjent = wybrany_pacjent[0:2]
            notification_win.label.setText("Wysłano komunikat nr " + str(pelny_komunikat) + "\ndo czujnika należącego do pacjenta " + (jaki_pacjent[0]) + " " + (jaki_pacjent[1]))
            notification_win.show()
            # Event log.
            cursor.execute("SELECT ID_pracownika FROM personel WHERE login LIKE %s",
                           (window.current_user,))
            ID_pracownika = cursor.fetchall()[0][0]
            cursor.execute(
                "INSERT INTO rejestr_zdarzen (ID_pracownika,rodzaj_zdarzenia,opis_zdarzenia) VALUES (%s, %s, %s)",
                (ID_pracownika,
                 "Wysłano komunikat nr " + str(pelny_komunikat) + " do czujnika należącego do pacjenta " + (jaki_pacjent[0]) + " " + (jaki_pacjent[1]),
                 ""))
            cnx.commit()
            window.eventList.insertItem(0, "Wysłano komunikat nr " + str(pelny_komunikat) + " do czujnika należącego do pacjenta " + (jaki_pacjent[0]) + " " + (jaki_pacjent[1]) + ", " + datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
            # Combo text is "Imie Nazwisko czujnik: ID" -> index 3 is the sensor id.
            # BUG FIX: convert to int — it was stored as a str but compared to ints below.
            self.wybrane_id_czujnika_pacjenta = int(wybrany_pacjent[3])
        except Exception:
            pass
        # Encode the sensor id on pins 11/10/9/8 (binary, pin 11 = least significant bit).
        if self.wybrane_id_czujnika_pacjenta == 1:
            board.digital[11].write(1)
        elif self.wybrane_id_czujnika_pacjenta == 2:
            board.digital[10].write(1)
        elif self.wybrane_id_czujnika_pacjenta == 3:
            board.digital[11].write(1)
            board.digital[10].write(1)
        elif self.wybrane_id_czujnika_pacjenta == 4:
            board.digital[9].write(1)
        elif self.wybrane_id_czujnika_pacjenta == 5:
            board.digital[9].write(1)
            board.digital[11].write(1)
        elif self.wybrane_id_czujnika_pacjenta == 6:
            board.digital[9].write(1)
            board.digital[10].write(1)
        elif self.wybrane_id_czujnika_pacjenta == 7:
            board.digital[9].write(1)
            board.digital[10].write(1)
            board.digital[11].write(1)
        elif self.wybrane_id_czujnika_pacjenta == 8:
            board.digital[8].write(1)
        elif self.wybrane_id_czujnika_pacjenta == 9:
            board.digital[8].write(1)
            board.digital[11].write(1)
        elif self.wybrane_id_czujnika_pacjenta == 10:
            board.digital[8].write(1)
            board.digital[10].write(1)
        board.exit()
        ser.open()
if __name__ == '__main__':
    # Application entry point: create the Qt application, instantiate every
    # window up front, and show only the login window; the main window is
    # revealed by the auth window after a successful login.
    app=QApplication(sys.argv)
    app.setStyle('Breeze')
    window = main_window()
    # window.show() # kept hidden: the main window is shown only after login; uncomment to bypass the password
    new_patient_window = new_patient() # window for adding a new patient
    edit_patient_window = edit_patient()
    new_user_window = new_user()
    edit_user_window = edit_user()
    new_sensor_window = new_sensor()
    edit_sensor_window = edit_sensor()
    # delete_confirm_window = delete_patient_confirm() # unused; replaced (temporarily or permanently) by QMessageBox
    auth_win = auth()
    auth_win.show()
    notification_win = notification()
    assign_sensor_window = assign_sensor()
    python_to_arduino_msg_win = python_to_arduino_msg()
    # new_user_window = new_user()
    sys.exit(app.exec_())
| 52.839207
| 308
| 0.556605
| 11,221
| 119,945
| 5.824258
| 0.093218
| 0.020381
| 0.015087
| 0.019096
| 0.788016
| 0.767099
| 0.748141
| 0.719084
| 0.70721
| 0.69044
| 0
| 0.006443
| 0.292176
| 119,945
| 2,270
| 309
| 52.839207
| 0.763333
| 0.201868
| 0
| 0.683526
| 0
| 0.015896
| 0.141497
| 0.013892
| 0
| 0
| 0
| 0.000441
| 0
| 1
| 0.046243
| false
| 0.03974
| 0.013006
| 0
| 0.068642
| 0.002168
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
98fdc2fe024320e8b6596b7d50b1864dbf15f197
| 1,112
|
py
|
Python
|
vizier/pyvizier/converters/__init__.py
|
google/vizier
|
12b64ce191410e1c3a79a98472a1b17811290ed3
|
[
"Apache-2.0"
] | 15
|
2022-03-03T21:05:47.000Z
|
2022-03-30T17:17:51.000Z
|
vizier/pyvizier/converters/__init__.py
|
google/vizier
|
12b64ce191410e1c3a79a98472a1b17811290ed3
|
[
"Apache-2.0"
] | null | null | null |
vizier/pyvizier/converters/__init__.py
|
google/vizier
|
12b64ce191410e1c3a79a98472a1b17811290ed3
|
[
"Apache-2.0"
] | null | null | null |
"""Import target for converters."""
from vizier.pyvizier.converters.core import DefaultModelInputConverter
from vizier.pyvizier.converters.core import DefaultModelOutputConverter
from vizier.pyvizier.converters.core import DefaultTrialConverter
from vizier.pyvizier.converters.core import dict_to_array
from vizier.pyvizier.converters.core import DictOf2DArrays
from vizier.pyvizier.converters.core import ModelInputConverter
from vizier.pyvizier.converters.core import ModelOutputConverter
from vizier.pyvizier.converters.core import NumpyArraySpec
from vizier.pyvizier.converters.core import NumpyArraySpecType
from vizier.pyvizier.converters.core import STUDY_ID_FIELD
from vizier.pyvizier.converters.core import TrialToArrayConverter
from vizier.pyvizier.converters.core import TrialToNumpyDict
from vizier.pyvizier.converters.spatio_temporal import DenseSpatioTemporalConverter
from vizier.pyvizier.converters.spatio_temporal import SparseSpatioTemporalConverter
from vizier.pyvizier.converters.spatio_temporal import TimedLabels
from vizier.pyvizier.converters.spatio_temporal import TimedLabelsExtractor
| 58.526316
| 84
| 0.888489
| 124
| 1,112
| 7.903226
| 0.241935
| 0.163265
| 0.293878
| 0.457143
| 0.661224
| 0.661224
| 0.195918
| 0
| 0
| 0
| 0
| 0.000959
| 0.06205
| 1,112
| 18
| 85
| 61.777778
| 0.938639
| 0.026079
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c7082c20a6f6967a2375669ed1bbf1f9b2f0ed77
| 163
|
py
|
Python
|
clifford/g3c.py
|
Dano-drevo/clifford
|
3da06765b3c48d9840b5d55f082363ba6075a0d4
|
[
"BSD-3-Clause"
] | 1
|
2019-11-11T13:07:29.000Z
|
2019-11-11T13:07:29.000Z
|
clifford/g3c.py
|
Dano-drevo/clifford
|
3da06765b3c48d9840b5d55f082363ba6075a0d4
|
[
"BSD-3-Clause"
] | null | null | null |
clifford/g3c.py
|
Dano-drevo/clifford
|
3da06765b3c48d9840b5d55f082363ba6075a0d4
|
[
"BSD-3-Clause"
] | null | null | null |
from . import Cl, conformalize
layout_orig, blades_orig = Cl(3)
layout, blades, stuff = conformalize(layout_orig)
locals().update(blades)
locals().update(stuff)
| 20.375
| 49
| 0.760736
| 22
| 163
| 5.5
| 0.5
| 0.297521
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006897
| 0.110429
| 163
| 7
| 50
| 23.285714
| 0.827586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c7630eb4c63084fdf0833fb76ca92c3bcbd45d18
| 110
|
py
|
Python
|
eggdriver/library/repos.py
|
PythonForChange/eggdriver
|
bcf1da6dcb2a8daf3144c7af8d1d04f8844be2fc
|
[
"MIT"
] | 3
|
2021-09-25T01:22:31.000Z
|
2021-11-28T23:25:46.000Z
|
eggdriver/library/repos.py
|
PythonForChange/eggdriver
|
bcf1da6dcb2a8daf3144c7af8d1d04f8844be2fc
|
[
"MIT"
] | null | null | null |
eggdriver/library/repos.py
|
PythonForChange/eggdriver
|
bcf1da6dcb2a8daf3144c7af8d1d04f8844be2fc
|
[
"MIT"
] | null | null | null |
from eggdriver.resources.modules import Repo
# NQS=Repo("PythonForChange", "eggdriver")
# nqs=NQS.pull("nqs")
| 27.5
| 44
| 0.754545
| 14
| 110
| 5.928571
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081818
| 110
| 4
| 45
| 27.5
| 0.821782
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
c79028d388d65ce8d699cf4359914f0aea9ae893
| 53,503
|
py
|
Python
|
Section_04_code/PyQT4/Lib/site-packages/PyQt4/examples/activeqt/webbrowser/mainwindow_rc.py
|
PacktPublishing/Python-Machine-Learning-Solutions-V-
|
8bb80a43a7c64032c25c1023faaa29bbfbd39d45
|
[
"MIT"
] | 1
|
2022-03-16T02:10:30.000Z
|
2022-03-16T02:10:30.000Z
|
Section_04_code/PyQT4/Lib/site-packages/PyQt4/examples/activeqt/webbrowser/mainwindow_rc.py
|
wensincai/Python-Machine-Learning-Solutions-V-
|
130c9881757fa90bbb124d48ddd0c6c1136fa20c
|
[
"MIT"
] | null | null | null |
Section_04_code/PyQT4/Lib/site-packages/PyQt4/examples/activeqt/webbrowser/mainwindow_rc.py
|
wensincai/Python-Machine-Learning-Solutions-V-
|
130c9881757fa90bbb124d48ddd0c6c1136fa20c
|
[
"MIT"
] | 2
|
2019-05-28T11:58:59.000Z
|
2020-09-23T17:21:19.000Z
|
# -*- coding: utf-8 -*-
# Resource object code
#
# Created: Fri 1. Mar 23:04:52 2013
# by: The Resource Compiler for PyQt (Qt v4.8.4)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = "\
\x00\x00\x0e\x9f\
\x2f\
\x2a\x20\x58\x50\x4d\x20\x2a\x2f\x0a\x73\x74\x61\x74\x69\x63\x20\
\x63\x68\x61\x72\x20\x2a\x69\x6d\x61\x67\x65\x36\x5b\x5d\x3d\x7b\
\x0a\x22\x32\x32\x20\x32\x32\x20\x31\x36\x35\x20\x32\x22\x2c\x0a\
\x22\x51\x74\x20\x63\x20\x4e\x6f\x6e\x65\x22\x2c\x0a\x22\x2e\x68\
\x20\x63\x20\x23\x30\x30\x30\x30\x30\x30\x22\x2c\x0a\x22\x2e\x6f\
\x20\x63\x20\x23\x30\x31\x30\x31\x30\x31\x22\x2c\x0a\x22\x61\x46\
\x20\x63\x20\x23\x30\x33\x30\x33\x30\x33\x22\x2c\x0a\x22\x23\x6e\
\x20\x63\x20\x23\x30\x36\x30\x36\x30\x36\x22\x2c\x0a\x22\x2e\x48\
\x20\x63\x20\x23\x30\x38\x30\x38\x30\x38\x22\x2c\x0a\x22\x2e\x72\
\x20\x63\x20\x23\x30\x63\x30\x63\x30\x63\x22\x2c\x0a\x22\x2e\x71\
\x20\x63\x20\x23\x31\x32\x31\x32\x31\x32\x22\x2c\x0a\x22\x23\x56\
\x20\x63\x20\x23\x31\x32\x34\x65\x37\x66\x22\x2c\x0a\x22\x23\x6d\
\x20\x63\x20\x23\x31\x33\x31\x33\x31\x33\x22\x2c\x0a\x22\x2e\x47\
\x20\x63\x20\x23\x31\x36\x31\x36\x31\x35\x22\x2c\x0a\x22\x61\x49\
\x20\x63\x20\x23\x31\x37\x32\x63\x61\x63\x22\x2c\x0a\x22\x2e\x36\
\x20\x63\x20\x23\x31\x38\x31\x38\x31\x38\x22\x2c\x0a\x22\x61\x48\
\x20\x63\x20\x23\x31\x38\x34\x31\x62\x32\x22\x2c\x0a\x22\x2e\x70\
\x20\x63\x20\x23\x31\x39\x31\x39\x31\x39\x22\x2c\x0a\x22\x23\x38\
\x20\x63\x20\x23\x31\x39\x32\x38\x62\x61\x22\x2c\x0a\x22\x23\x57\
\x20\x63\x20\x23\x31\x39\x32\x39\x62\x61\x22\x2c\x0a\x22\x61\x6a\
\x20\x63\x20\x23\x31\x39\x32\x61\x62\x61\x22\x2c\x0a\x22\x61\x76\
\x20\x63\x20\x23\x31\x61\x33\x31\x63\x32\x22\x2c\x0a\x22\x61\x45\
\x20\x63\x20\x23\x31\x62\x31\x62\x31\x62\x22\x2c\x0a\x22\x61\x44\
\x20\x63\x20\x23\x31\x62\x32\x62\x63\x38\x22\x2c\x0a\x22\x61\x74\
\x20\x63\x20\x23\x31\x63\x37\x39\x64\x32\x22\x2c\x0a\x22\x61\x68\
\x20\x63\x20\x23\x31\x63\x37\x64\x64\x34\x22\x2c\x0a\x22\x61\x41\
\x20\x63\x20\x23\x31\x64\x31\x64\x31\x64\x22\x2c\x0a\x22\x61\x43\
\x20\x63\x20\x23\x31\x64\x37\x61\x64\x37\x22\x2c\x0a\x22\x23\x36\
\x20\x63\x20\x23\x31\x64\x37\x65\x64\x61\x22\x2c\x0a\x22\x2e\x35\
\x20\x63\x20\x23\x31\x65\x31\x65\x31\x65\x22\x2c\x0a\x22\x23\x37\
\x20\x63\x20\x23\x31\x65\x38\x61\x65\x30\x22\x2c\x0a\x22\x61\x69\
\x20\x63\x20\x23\x31\x65\x38\x64\x65\x30\x22\x2c\x0a\x22\x61\x75\
\x20\x63\x20\x23\x31\x65\x39\x30\x65\x30\x22\x2c\x0a\x22\x2e\x67\
\x20\x63\x20\x23\x32\x34\x32\x34\x32\x34\x22\x2c\x0a\x22\x23\x35\
\x20\x63\x20\x23\x33\x31\x33\x62\x36\x63\x22\x2c\x0a\x22\x61\x42\
\x20\x63\x20\x23\x33\x34\x33\x64\x38\x64\x22\x2c\x0a\x22\x23\x33\
\x20\x63\x20\x23\x33\x39\x33\x39\x33\x39\x22\x2c\x0a\x22\x2e\x66\
\x20\x63\x20\x23\x33\x64\x33\x64\x33\x64\x22\x2c\x0a\x22\x61\x6b\
\x20\x63\x20\x23\x34\x33\x33\x39\x36\x37\x22\x2c\x0a\x22\x23\x43\
\x20\x63\x20\x23\x34\x33\x34\x34\x34\x32\x22\x2c\x0a\x22\x61\x77\
\x20\x63\x20\x23\x34\x34\x33\x63\x36\x65\x22\x2c\x0a\x22\x23\x6c\
\x20\x63\x20\x23\x34\x35\x34\x36\x34\x33\x22\x2c\x0a\x22\x61\x73\
\x20\x63\x20\x23\x34\x35\x34\x66\x61\x33\x22\x2c\x0a\x22\x2e\x34\
\x20\x63\x20\x23\x34\x37\x34\x38\x34\x35\x22\x2c\x0a\x22\x23\x52\
\x20\x63\x20\x23\x34\x37\x34\x38\x34\x36\x22\x2c\x0a\x22\x2e\x65\
\x20\x63\x20\x23\x34\x38\x34\x38\x34\x38\x22\x2c\x0a\x22\x61\x47\
\x20\x63\x20\x23\x34\x38\x35\x36\x61\x30\x22\x2c\x0a\x22\x2e\x64\
\x20\x63\x20\x23\x34\x61\x34\x61\x34\x61\x22\x2c\x0a\x22\x23\x39\
\x20\x63\x20\x23\x34\x63\x34\x31\x36\x61\x22\x2c\x0a\x22\x2e\x63\
\x20\x63\x20\x23\x34\x63\x34\x63\x34\x63\x22\x2c\x0a\x22\x61\x67\
\x20\x63\x20\x23\x34\x63\x35\x63\x61\x65\x22\x2c\x0a\x22\x23\x58\
\x20\x63\x20\x23\x34\x64\x34\x36\x36\x39\x22\x2c\x0a\x22\x23\x7a\
\x20\x63\x20\x23\x34\x64\x63\x32\x62\x32\x22\x2c\x0a\x22\x23\x50\
\x20\x63\x20\x23\x34\x65\x34\x34\x35\x61\x22\x2c\x0a\x22\x23\x47\
\x20\x63\x20\x23\x35\x33\x35\x30\x34\x65\x22\x2c\x0a\x22\x2e\x6c\
\x20\x63\x20\x23\x35\x34\x35\x34\x35\x34\x22\x2c\x0a\x22\x2e\x62\
\x20\x63\x20\x23\x35\x35\x35\x35\x35\x35\x22\x2c\x0a\x22\x23\x51\
\x20\x63\x20\x23\x35\x39\x35\x32\x37\x34\x22\x2c\x0a\x22\x23\x69\
\x20\x63\x20\x23\x36\x32\x62\x32\x61\x32\x22\x2c\x0a\x22\x2e\x61\
\x20\x63\x20\x23\x36\x37\x36\x37\x36\x37\x22\x2c\x0a\x22\x23\x6a\
\x20\x63\x20\x23\x36\x39\x62\x36\x61\x33\x22\x2c\x0a\x22\x2e\x46\
\x20\x63\x20\x23\x36\x66\x36\x66\x36\x61\x22\x2c\x0a\x22\x2e\x4b\
\x20\x63\x20\x23\x36\x66\x38\x62\x38\x39\x22\x2c\x0a\x22\x61\x78\
\x20\x63\x20\x23\x37\x34\x37\x34\x37\x34\x22\x2c\x0a\x22\x23\x61\
\x20\x63\x20\x23\x37\x36\x61\x31\x61\x30\x22\x2c\x0a\x22\x2e\x23\
\x20\x63\x20\x23\x37\x39\x37\x39\x37\x39\x22\x2c\x0a\x22\x23\x41\
\x20\x63\x20\x23\x37\x62\x64\x37\x65\x38\x22\x2c\x0a\x22\x61\x79\
\x20\x63\x20\x23\x37\x64\x37\x64\x37\x64\x22\x2c\x0a\x22\x23\x55\
\x20\x63\x20\x23\x37\x65\x37\x64\x37\x33\x22\x2c\x0a\x22\x2e\x49\
\x20\x63\x20\x23\x38\x30\x38\x30\x38\x30\x22\x2c\x0a\x22\x2e\x4f\
\x20\x63\x20\x23\x38\x31\x61\x33\x62\x38\x22\x2c\x0a\x22\x61\x7a\
\x20\x63\x20\x23\x38\x32\x38\x32\x38\x32\x22\x2c\x0a\x22\x23\x79\
\x20\x63\x20\x23\x38\x32\x64\x31\x63\x30\x22\x2c\x0a\x22\x23\x53\
\x20\x63\x20\x23\x38\x34\x38\x34\x38\x34\x22\x2c\x0a\x22\x23\x4f\
\x20\x63\x20\x23\x38\x35\x37\x65\x37\x63\x22\x2c\x0a\x22\x23\x46\
\x20\x63\x20\x23\x38\x36\x61\x64\x64\x32\x22\x2c\x0a\x22\x23\x6b\
\x20\x63\x20\x23\x38\x36\x63\x64\x64\x61\x22\x2c\x0a\x22\x23\x4e\
\x20\x63\x20\x23\x38\x39\x62\x31\x62\x64\x22\x2c\x0a\x22\x23\x68\
\x20\x63\x20\x23\x38\x61\x63\x65\x63\x33\x22\x2c\x0a\x22\x23\x6f\
\x20\x63\x20\x23\x38\x63\x38\x63\x38\x63\x22\x2c\x0a\x22\x23\x4c\
\x20\x63\x20\x23\x38\x63\x62\x62\x63\x61\x22\x2c\x0a\x22\x61\x6c\
\x20\x63\x20\x23\x38\x64\x38\x64\x38\x64\x22\x2c\x0a\x22\x2e\x73\
\x20\x63\x20\x23\x38\x66\x38\x66\x38\x66\x22\x2c\x0a\x22\x23\x48\
\x20\x63\x20\x23\x39\x30\x39\x30\x39\x30\x22\x2c\x0a\x22\x23\x4b\
\x20\x63\x20\x23\x39\x30\x62\x61\x63\x39\x22\x2c\x0a\x22\x23\x4d\
\x20\x63\x20\x23\x39\x31\x63\x34\x63\x66\x22\x2c\x0a\x22\x23\x74\
\x20\x63\x20\x23\x39\x32\x39\x32\x39\x32\x22\x2c\x0a\x22\x2e\x58\
\x20\x63\x20\x23\x39\x34\x63\x32\x63\x66\x22\x2c\x0a\x22\x2e\x4e\
\x20\x63\x20\x23\x39\x34\x63\x32\x64\x30\x22\x2c\x0a\x22\x2e\x57\
\x20\x63\x20\x23\x39\x35\x39\x35\x39\x35\x22\x2c\x0a\x22\x23\x42\
\x20\x63\x20\x23\x39\x36\x62\x39\x63\x62\x22\x2c\x0a\x22\x2e\x33\
\x20\x63\x20\x23\x39\x37\x62\x65\x64\x65\x22\x2c\x0a\x22\x2e\x4c\
\x20\x63\x20\x23\x39\x38\x63\x32\x63\x65\x22\x2c\x0a\x22\x2e\x50\
\x20\x63\x20\x23\x39\x62\x39\x62\x39\x62\x22\x2c\x0a\x22\x61\x2e\
\x20\x63\x20\x23\x39\x63\x39\x63\x39\x63\x22\x2c\x0a\x22\x61\x6d\
\x20\x63\x20\x23\x39\x64\x39\x64\x39\x64\x22\x2c\x0a\x22\x23\x73\
\x20\x63\x20\x23\x39\x64\x62\x64\x63\x34\x22\x2c\x0a\x22\x23\x62\
\x20\x63\x20\x23\x39\x65\x39\x65\x39\x65\x22\x2c\x0a\x22\x23\x59\
\x20\x63\x20\x23\x61\x30\x61\x30\x61\x30\x22\x2c\x0a\x22\x61\x6e\
\x20\x63\x20\x23\x61\x36\x61\x36\x61\x36\x22\x2c\x0a\x22\x23\x76\
\x20\x63\x20\x23\x61\x36\x64\x31\x64\x36\x22\x2c\x0a\x22\x2e\x5a\
\x20\x63\x20\x23\x61\x37\x61\x31\x61\x31\x22\x2c\x0a\x22\x23\x49\
\x20\x63\x20\x23\x61\x37\x61\x37\x61\x37\x22\x2c\x0a\x22\x23\x44\
\x20\x63\x20\x23\x61\x37\x65\x64\x64\x65\x22\x2c\x0a\x22\x2e\x4d\
\x20\x63\x20\x23\x61\x38\x64\x38\x65\x35\x22\x2c\x0a\x22\x23\x75\
\x20\x63\x20\x23\x61\x65\x61\x65\x61\x65\x22\x2c\x0a\x22\x61\x63\
\x20\x63\x20\x23\x62\x31\x62\x31\x62\x31\x22\x2c\x0a\x22\x61\x23\
\x20\x63\x20\x23\x62\x32\x62\x32\x62\x32\x22\x2c\x0a\x22\x2e\x32\
\x20\x63\x20\x23\x62\x35\x63\x62\x64\x36\x22\x2c\x0a\x22\x2e\x56\
\x20\x63\x20\x23\x62\x36\x62\x36\x62\x36\x22\x2c\x0a\x22\x61\x72\
\x20\x63\x20\x23\x62\x37\x62\x37\x62\x37\x22\x2c\x0a\x22\x23\x5a\
\x20\x63\x20\x23\x62\x38\x62\x38\x62\x38\x22\x2c\x0a\x22\x23\x32\
\x20\x63\x20\x23\x62\x61\x62\x61\x62\x61\x22\x2c\x0a\x22\x2e\x51\
\x20\x63\x20\x23\x62\x61\x62\x62\x62\x39\x22\x2c\x0a\x22\x61\x71\
\x20\x63\x20\x23\x62\x62\x62\x62\x62\x62\x22\x2c\x0a\x22\x23\x63\
\x20\x63\x20\x23\x62\x63\x62\x63\x62\x63\x22\x2c\x0a\x22\x2e\x38\
\x20\x63\x20\x23\x62\x66\x62\x66\x62\x66\x22\x2c\x0a\x22\x61\x70\
\x20\x63\x20\x23\x63\x30\x63\x30\x63\x30\x22\x2c\x0a\x22\x2e\x31\
\x20\x63\x20\x23\x63\x31\x64\x39\x64\x32\x22\x2c\x0a\x22\x61\x61\
\x20\x63\x20\x23\x63\x38\x63\x38\x63\x38\x22\x2c\x0a\x22\x61\x62\
\x20\x63\x20\x23\x63\x61\x63\x61\x63\x61\x22\x2c\x0a\x22\x61\x6f\
\x20\x63\x20\x23\x63\x62\x63\x62\x63\x62\x22\x2c\x0a\x22\x2e\x59\
\x20\x63\x20\x23\x63\x63\x66\x38\x66\x31\x22\x2c\x0a\x22\x23\x4a\
\x20\x63\x20\x23\x63\x64\x63\x64\x63\x64\x22\x2c\x0a\x22\x2e\x52\
\x20\x63\x20\x23\x63\x65\x63\x65\x63\x65\x22\x2c\x0a\x22\x23\x70\
\x20\x63\x20\x23\x64\x31\x66\x39\x66\x34\x22\x2c\x0a\x22\x23\x34\
\x20\x63\x20\x23\x64\x35\x64\x35\x64\x35\x22\x2c\x0a\x22\x23\x77\
\x20\x63\x20\x23\x64\x35\x65\x63\x65\x34\x22\x2c\x0a\x22\x2e\x39\
\x20\x63\x20\x23\x64\x35\x66\x36\x66\x35\x22\x2c\x0a\x22\x23\x30\
\x20\x63\x20\x23\x64\x36\x64\x36\x64\x36\x22\x2c\x0a\x22\x2e\x55\
\x20\x63\x20\x23\x64\x37\x64\x37\x64\x37\x22\x2c\x0a\x22\x61\x66\
\x20\x63\x20\x23\x64\x38\x64\x38\x64\x38\x22\x2c\x0a\x22\x2e\x78\
\x20\x63\x20\x23\x64\x61\x64\x61\x64\x61\x22\x2c\x0a\x22\x2e\x79\
\x20\x63\x20\x23\x64\x62\x64\x62\x64\x62\x22\x2c\x0a\x22\x2e\x77\
\x20\x63\x20\x23\x64\x63\x64\x63\x64\x63\x22\x2c\x0a\x22\x23\x65\
\x20\x63\x20\x23\x64\x63\x66\x37\x66\x34\x22\x2c\x0a\x22\x23\x31\
\x20\x63\x20\x23\x64\x64\x64\x64\x64\x64\x22\x2c\x0a\x22\x61\x65\
\x20\x63\x20\x23\x64\x65\x64\x65\x64\x65\x22\x2c\x0a\x22\x2e\x45\
\x20\x63\x20\x23\x64\x66\x64\x66\x64\x66\x22\x2c\x0a\x22\x2e\x7a\
\x20\x63\x20\x23\x65\x30\x65\x30\x65\x30\x22\x2c\x0a\x22\x23\x67\
\x20\x63\x20\x23\x65\x30\x65\x62\x65\x34\x22\x2c\x0a\x22\x23\x71\
\x20\x63\x20\x23\x65\x31\x65\x64\x65\x61\x22\x2c\x0a\x22\x23\x64\
\x20\x63\x20\x23\x65\x32\x65\x32\x65\x32\x22\x2c\x0a\x22\x23\x78\
\x20\x63\x20\x23\x65\x32\x65\x62\x65\x32\x22\x2c\x0a\x22\x2e\x30\
\x20\x63\x20\x23\x65\x32\x65\x65\x65\x35\x22\x2c\x0a\x22\x2e\x76\
\x20\x63\x20\x23\x65\x33\x65\x33\x65\x33\x22\x2c\x0a\x22\x23\x23\
\x20\x63\x20\x23\x65\x33\x65\x37\x64\x35\x22\x2c\x0a\x22\x23\x72\
\x20\x63\x20\x23\x65\x34\x65\x62\x64\x39\x22\x2c\x0a\x22\x23\x45\
\x20\x63\x20\x23\x65\x34\x66\x32\x65\x63\x22\x2c\x0a\x22\x2e\x75\
\x20\x63\x20\x23\x65\x35\x65\x35\x65\x35\x22\x2c\x0a\x22\x2e\x53\
\x20\x63\x20\x23\x65\x37\x65\x37\x65\x37\x22\x2c\x0a\x22\x2e\x6b\
\x20\x63\x20\x23\x65\x39\x65\x39\x65\x39\x22\x2c\x0a\x22\x2e\x41\
\x20\x63\x20\x23\x65\x62\x65\x62\x65\x62\x22\x2c\x0a\x22\x2e\x44\
\x20\x63\x20\x23\x65\x64\x65\x64\x65\x64\x22\x2c\x0a\x22\x2e\x74\
\x20\x63\x20\x23\x66\x30\x66\x30\x66\x30\x22\x2c\x0a\x22\x23\x54\
\x20\x63\x20\x23\x66\x34\x66\x34\x66\x34\x22\x2c\x0a\x22\x2e\x6e\
\x20\x63\x20\x23\x66\x36\x66\x36\x66\x36\x22\x2c\x0a\x22\x2e\x43\
\x20\x63\x20\x23\x66\x37\x66\x37\x66\x37\x22\x2c\x0a\x22\x23\x66\
\x20\x63\x20\x23\x66\x38\x66\x35\x66\x32\x22\x2c\x0a\x22\x2e\x6d\
\x20\x63\x20\x23\x66\x38\x66\x38\x66\x38\x22\x2c\x0a\x22\x2e\x37\
\x20\x63\x20\x23\x66\x39\x66\x39\x66\x39\x22\x2c\x0a\x22\x61\x64\
\x20\x63\x20\x23\x66\x61\x66\x61\x66\x61\x22\x2c\x0a\x22\x23\x2e\
\x20\x63\x20\x23\x66\x62\x66\x30\x65\x62\x22\x2c\x0a\x22\x2e\x4a\
\x20\x63\x20\x23\x66\x62\x66\x62\x66\x62\x22\x2c\x0a\x22\x2e\x54\
\x20\x63\x20\x23\x66\x63\x66\x63\x66\x63\x22\x2c\x0a\x22\x2e\x42\
\x20\x63\x20\x23\x66\x64\x66\x64\x66\x64\x22\x2c\x0a\x22\x2e\x6a\
\x20\x63\x20\x23\x66\x65\x66\x65\x66\x65\x22\x2c\x0a\x22\x2e\x69\
\x20\x63\x20\x23\x66\x66\x66\x66\x66\x66\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x23\x2e\x61\x2e\x62\x2e\x63\
\x2e\x64\x2e\x64\x2e\x65\x2e\x66\x2e\x67\x2e\x68\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x2e\x23\x2e\x69\x2e\x69\x2e\x69\x2e\x69\
\x2e\x69\x2e\x69\x2e\x69\x2e\x69\x2e\x69\x2e\x68\x2e\x68\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x2e\x61\x2e\x6a\x2e\x6a\x2e\x6a\x2e\x6a\
\x2e\x6a\x2e\x6a\x2e\x69\x2e\x69\x2e\x69\x2e\x68\x2e\x6b\x2e\x68\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x2e\x6c\x2e\x6d\x2e\x6e\x2e\x6f\x2e\x70\
\x2e\x71\x2e\x72\x2e\x73\x2e\x6a\x2e\x69\x2e\x68\x2e\x74\x2e\x75\
\x2e\x68\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x2e\x65\x2e\x76\x2e\x77\x2e\x78\x2e\x79\
\x2e\x7a\x2e\x41\x2e\x6e\x2e\x42\x2e\x42\x2e\x68\x2e\x43\x2e\x44\
\x2e\x45\x2e\x68\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x2e\x68\x2e\x68\x2e\x68\x2e\x68\x2e\x68\x2e\x46\
\x2e\x47\x2e\x48\x2e\x49\x2e\x6e\x2e\x4a\x2e\x68\x2e\x68\x2e\x68\
\x2e\x68\x2e\x68\x2e\x68\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x2e\x68\x2e\x4b\x2e\x4c\x2e\x4d\x2e\x4e\x2e\x4f\x2e\x68\
\x2e\x50\x2e\x51\x2e\x52\x2e\x53\x2e\x6e\x2e\x54\x2e\x6a\x2e\x55\
\x2e\x56\x2e\x57\x2e\x68\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x2e\x68\x2e\x58\x2e\x59\x2e\x5a\x2e\x30\x2e\x31\x2e\x32\x2e\x33\
\x2e\x68\x2e\x34\x2e\x35\x2e\x36\x2e\x71\x2e\x72\x2e\x73\x2e\x37\
\x2e\x76\x2e\x38\x2e\x68\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x2e\x68\x2e\x39\x23\x2e\x2e\x23\x23\x23\x2e\x46\x2e\x46\x2e\x46\
\x23\x61\x2e\x68\x23\x62\x23\x63\x23\x64\x2e\x6d\x2e\x6a\x2e\x37\
\x2e\x76\x23\x63\x2e\x68\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x2e\x68\x23\x65\x23\x66\x2e\x61\x23\x67\x23\x68\x23\x69\x23\x6a\
\x23\x6b\x2e\x68\x23\x6c\x23\x6d\x2e\x72\x23\x6e\x23\x6f\x2e\x37\
\x2e\x76\x23\x63\x2e\x68\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x2e\x68\x23\x70\x23\x71\x2e\x61\x23\x72\x2e\x46\x2e\x46\x2e\x46\
\x23\x73\x2e\x68\x23\x74\x23\x75\x2e\x78\x2e\x6e\x2e\x6a\x2e\x37\
\x2e\x76\x23\x63\x2e\x68\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x2e\x68\x23\x76\x23\x77\x2e\x23\x23\x78\x23\x79\x23\x7a\x23\x41\
\x23\x42\x2e\x68\x23\x43\x23\x6d\x23\x6d\x2e\x71\x23\x6f\x2e\x37\
\x2e\x76\x23\x63\x2e\x68\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x2e\x68\x23\x44\x2e\x5a\x23\x45\x2e\x46\x2e\x46\x23\x46\
\x2e\x68\x23\x47\x23\x48\x23\x49\x23\x4a\x2e\x41\x2e\x37\x2e\x6d\
\x2e\x76\x23\x63\x2e\x68\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x2e\x68\x23\x4b\x23\x4c\x23\x4d\x23\x4e\x2e\x68\
\x23\x4f\x23\x50\x23\x51\x23\x52\x23\x6d\x2e\x71\x23\x53\x23\x54\
\x23\x64\x23\x63\x2e\x68\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x2e\x68\x2e\x68\x2e\x68\x2e\x68\x2e\x5a\
\x23\x55\x23\x56\x23\x57\x23\x58\x23\x59\x23\x5a\x23\x30\x2e\x53\
\x23\x31\x23\x32\x2e\x68\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x23\x33\x2e\x74\x2e\x76\x2e\x78\x23\x34\
\x23\x35\x23\x36\x23\x37\x23\x38\x23\x39\x61\x2e\x61\x23\x61\x61\
\x61\x62\x61\x63\x2e\x68\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x2e\x36\x61\x64\x2e\x6b\x61\x65\x2e\x79\
\x61\x66\x61\x67\x61\x68\x61\x69\x61\x6a\x61\x6b\x61\x6c\x61\x6d\
\x61\x6e\x61\x2e\x2e\x6f\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x2e\x68\x61\x65\x61\x6f\x61\x70\x61\x71\
\x61\x71\x61\x72\x61\x73\x61\x74\x61\x75\x61\x76\x61\x77\x61\x78\
\x61\x79\x61\x7a\x2e\x6f\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x68\x2e\x68\x2e\x68\x2e\x68\
\x2e\x68\x61\x41\x2e\x6f\x61\x42\x61\x43\x61\x69\x61\x44\x2e\x48\
\x61\x45\x61\x46\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x61\x47\x61\x48\x61\x49\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x7d\x3b\x0a\
\x00\x00\x00\xad\
\x00\
\x00\x04\xda\x78\x9c\xd3\xd7\x52\x88\x08\xf0\x55\xd0\xd2\xe7\x2a\
\x2e\x49\x2c\xc9\x4c\x56\x48\xce\x48\x2c\x52\xd0\xca\xcc\x4d\x4c\
\x4f\x35\x89\x8e\xb5\xad\xe6\x52\x32\x36\x52\x00\x21\x05\x43\x25\
\x1d\x2e\x25\x3d\x85\x64\x05\xbf\xfc\xbc\x54\x10\x5b\x19\xc8\x56\
\x36\x00\x02\x0b\x03\x10\x37\x11\xc4\xb5\x30\xb0\x80\x72\xf5\x08\
\x80\x51\x35\x24\xab\x51\x86\x81\x44\x34\x35\xca\x98\x4a\xe0\x8a\
\xe0\x6a\x94\x11\x4a\x10\xac\x44\x34\x35\xca\x30\x43\x90\x4d\x4c\
\x44\x53\xa3\x8c\xa6\x04\xae\x11\x59\x0d\x9a\x0a\xb8\x0b\xd0\xd4\
\x60\x78\x91\x4c\x35\xc4\xd8\x85\xa6\x08\x87\x1a\x54\x7f\x61\xf8\
\x1d\x5d\x11\x7a\x18\x62\x01\x89\x30\xe5\xb8\xd5\xc0\x95\x0c\x74\
\x1a\x1b\x1e\x6a\x6a\xad\xb9\x00\xc3\x36\xd8\x5e\
\x00\x00\x15\xdf\
\x2f\
\x2a\x20\x58\x50\x4d\x20\x2a\x2f\x0a\x73\x74\x61\x74\x69\x63\x20\
\x63\x68\x61\x72\x20\x2a\x69\x6d\x61\x67\x65\x35\x5b\x5d\x3d\x7b\
\x0a\x22\x33\x32\x20\x33\x32\x20\x32\x31\x31\x20\x32\x22\x2c\x0a\
\x22\x51\x74\x20\x63\x20\x4e\x6f\x6e\x65\x22\x2c\x0a\x22\x61\x4a\
\x20\x63\x20\x23\x30\x30\x30\x30\x30\x30\x22\x2c\x0a\x22\x23\x57\
\x20\x63\x20\x23\x30\x38\x30\x38\x30\x30\x22\x2c\x0a\x22\x23\x4e\
\x20\x63\x20\x23\x31\x30\x30\x38\x30\x30\x22\x2c\x0a\x22\x23\x56\
\x20\x63\x20\x23\x31\x30\x31\x30\x30\x30\x22\x2c\x0a\x22\x23\x45\
\x20\x63\x20\x23\x31\x30\x31\x30\x30\x38\x22\x2c\x0a\x22\x23\x4d\
\x20\x63\x20\x23\x31\x38\x31\x30\x30\x30\x22\x2c\x0a\x22\x23\x4c\
\x20\x63\x20\x23\x31\x38\x31\x38\x30\x30\x22\x2c\x0a\x22\x23\x44\
\x20\x63\x20\x23\x31\x38\x31\x38\x30\x38\x22\x2c\x0a\x22\x23\x46\
\x20\x63\x20\x23\x31\x38\x31\x38\x31\x30\x22\x2c\x0a\x22\x61\x73\
\x20\x63\x20\x23\x32\x31\x31\x38\x30\x30\x22\x2c\x0a\x22\x23\x4b\
\x20\x63\x20\x23\x32\x31\x31\x38\x30\x38\x22\x2c\x0a\x22\x23\x4a\
\x20\x63\x20\x23\x32\x31\x32\x31\x31\x30\x22\x2c\x0a\x22\x23\x47\
\x20\x63\x20\x23\x32\x31\x32\x31\x31\x38\x22\x2c\x0a\x22\x23\x71\
\x20\x63\x20\x23\x32\x31\x32\x31\x32\x31\x22\x2c\x0a\x22\x61\x6e\
\x20\x63\x20\x23\x32\x39\x32\x31\x30\x30\x22\x2c\x0a\x22\x23\x49\
\x20\x63\x20\x23\x32\x39\x32\x31\x31\x30\x22\x2c\x0a\x22\x61\x4e\
\x20\x63\x20\x23\x32\x39\x32\x39\x31\x30\x22\x2c\x0a\x22\x23\x48\
\x20\x63\x20\x23\x32\x39\x32\x39\x31\x38\x22\x2c\x0a\x22\x23\x79\
\x20\x63\x20\x23\x32\x39\x32\x39\x32\x31\x22\x2c\x0a\x22\x61\x46\
\x20\x63\x20\x23\x33\x31\x32\x39\x30\x30\x22\x2c\x0a\x22\x23\x43\
\x20\x63\x20\x23\x33\x31\x32\x39\x31\x38\x22\x2c\x0a\x22\x23\x69\
\x20\x63\x20\x23\x33\x31\x32\x39\x32\x39\x22\x2c\x0a\x22\x2e\x30\
\x20\x63\x20\x23\x33\x31\x33\x31\x33\x31\x22\x2c\x0a\x22\x23\x37\
\x20\x63\x20\x23\x33\x39\x33\x31\x30\x30\x22\x2c\x0a\x22\x23\x55\
\x20\x63\x20\x23\x33\x39\x33\x31\x30\x38\x22\x2c\x0a\x22\x61\x47\
\x20\x63\x20\x23\x33\x39\x33\x39\x33\x31\x22\x2c\x0a\x22\x2e\x4d\
\x20\x63\x20\x23\x33\x39\x33\x39\x33\x39\x22\x2c\x0a\x22\x23\x75\
\x20\x63\x20\x23\x34\x32\x33\x39\x33\x39\x22\x2c\x0a\x22\x23\x63\
\x20\x63\x20\x23\x34\x32\x34\x61\x34\x32\x22\x2c\x0a\x22\x61\x45\
\x20\x63\x20\x23\x34\x61\x34\x32\x30\x30\x22\x2c\x0a\x22\x61\x59\
\x20\x63\x20\x23\x34\x61\x34\x32\x31\x30\x22\x2c\x0a\x22\x2e\x56\
\x20\x63\x20\x23\x34\x61\x34\x32\x33\x39\x22\x2c\x0a\x22\x23\x42\
\x20\x63\x20\x23\x34\x61\x34\x61\x33\x39\x22\x2c\x0a\x22\x2e\x6d\
\x20\x63\x20\x23\x34\x61\x34\x61\x34\x32\x22\x2c\x0a\x22\x61\x50\
\x20\x63\x20\x23\x34\x61\x35\x32\x31\x38\x22\x2c\x0a\x22\x2e\x47\
\x20\x63\x20\x23\x34\x61\x35\x32\x35\x32\x22\x2c\x0a\x22\x2e\x7a\
\x20\x63\x20\x23\x34\x61\x35\x61\x35\x32\x22\x2c\x0a\x22\x61\x4f\
\x20\x63\x20\x23\x35\x32\x34\x32\x30\x30\x22\x2c\x0a\x22\x61\x6d\
\x20\x63\x20\x23\x35\x32\x34\x61\x30\x30\x22\x2c\x0a\x22\x23\x54\
\x20\x63\x20\x23\x35\x32\x34\x61\x30\x38\x22\x2c\x0a\x22\x23\x70\
\x20\x63\x20\x23\x35\x32\x34\x61\x34\x61\x22\x2c\x0a\x22\x2e\x73\
\x20\x63\x20\x23\x35\x32\x35\x32\x35\x32\x22\x2c\x0a\x22\x23\x61\
\x20\x63\x20\x23\x35\x32\x35\x61\x35\x32\x22\x2c\x0a\x22\x2e\x37\
\x20\x63\x20\x23\x35\x32\x36\x33\x35\x61\x22\x2c\x0a\x22\x23\x62\
\x20\x63\x20\x23\x35\x32\x36\x33\x36\x33\x22\x2c\x0a\x22\x61\x7a\
\x20\x63\x20\x23\x35\x61\x34\x61\x30\x30\x22\x2c\x0a\x22\x23\x36\
\x20\x63\x20\x23\x35\x61\x35\x32\x30\x38\x22\x2c\x0a\x22\x61\x77\
\x20\x63\x20\x23\x35\x61\x35\x32\x31\x30\x22\x2c\x0a\x22\x2e\x5a\
\x20\x63\x20\x23\x35\x61\x35\x32\x34\x61\x22\x2c\x0a\x22\x2e\x4c\
\x20\x63\x20\x23\x35\x61\x35\x61\x35\x32\x22\x2c\x0a\x22\x2e\x79\
\x20\x63\x20\x23\x35\x61\x35\x61\x35\x61\x22\x2c\x0a\x22\x2e\x46\
\x20\x63\x20\x23\x35\x61\x36\x33\x35\x61\x22\x2c\x0a\x22\x23\x53\
\x20\x63\x20\x23\x36\x33\x35\x32\x30\x38\x22\x2c\x0a\x22\x61\x78\
\x20\x63\x20\x23\x36\x33\x35\x61\x31\x30\x22\x2c\x0a\x22\x2e\x55\
\x20\x63\x20\x23\x36\x33\x35\x61\x35\x32\x22\x2c\x0a\x22\x2e\x72\
\x20\x63\x20\x23\x36\x33\x35\x61\x35\x61\x22\x2c\x0a\x22\x61\x5a\
\x20\x63\x20\x23\x36\x33\x36\x33\x32\x31\x22\x2c\x0a\x22\x23\x23\
\x20\x63\x20\x23\x36\x33\x36\x33\x35\x61\x22\x2c\x0a\x22\x2e\x41\
\x20\x63\x20\x23\x36\x33\x36\x33\x36\x33\x22\x2c\x0a\x22\x23\x68\
\x20\x63\x20\x23\x36\x33\x36\x62\x36\x62\x22\x2c\x0a\x22\x61\x4c\
\x20\x63\x20\x23\x36\x33\x37\x33\x32\x39\x22\x2c\x0a\x22\x61\x66\
\x20\x63\x20\x23\x36\x62\x35\x61\x30\x30\x22\x2c\x0a\x22\x2e\x36\
\x20\x63\x20\x23\x36\x62\x36\x33\x35\x61\x22\x2c\x0a\x22\x23\x6f\
\x20\x63\x20\x23\x36\x62\x36\x33\x36\x33\x22\x2c\x0a\x22\x2e\x45\
\x20\x63\x20\x23\x36\x62\x36\x62\x36\x33\x22\x2c\x0a\x22\x61\x34\
\x20\x63\x20\x23\x36\x62\x37\x33\x32\x31\x22\x2c\x0a\x22\x61\x53\
\x20\x63\x20\x23\x36\x62\x37\x33\x32\x39\x22\x2c\x0a\x22\x61\x52\
\x20\x63\x20\x23\x36\x62\x37\x33\x33\x31\x22\x2c\x0a\x22\x61\x58\
\x20\x63\x20\x23\x37\x33\x36\x33\x30\x30\x22\x2c\x0a\x22\x61\x6c\
\x20\x63\x20\x23\x37\x33\x36\x33\x30\x38\x22\x2c\x0a\x22\x23\x52\
\x20\x63\x20\x23\x37\x33\x36\x62\x31\x30\x22\x2c\x0a\x22\x2e\x54\
\x20\x63\x20\x23\x37\x33\x36\x62\x36\x33\x22\x2c\x0a\x22\x61\x35\
\x20\x63\x20\x23\x37\x33\x37\x33\x32\x39\x22\x2c\x0a\x22\x2e\x69\
\x20\x63\x20\x23\x37\x33\x37\x33\x36\x62\x22\x2c\x0a\x22\x2e\x4b\
\x20\x63\x20\x23\x37\x33\x37\x33\x37\x33\x22\x2c\x0a\x22\x61\x37\
\x20\x63\x20\x23\x37\x33\x37\x62\x32\x39\x22\x2c\x0a\x22\x23\x67\
\x20\x63\x20\x23\x37\x33\x37\x62\x37\x33\x22\x2c\x0a\x22\x61\x39\
\x20\x63\x20\x23\x37\x62\x37\x33\x31\x38\x22\x2c\x0a\x22\x23\x41\
\x20\x63\x20\x23\x37\x62\x37\x33\x37\x33\x22\x2c\x0a\x22\x61\x38\
\x20\x63\x20\x23\x37\x62\x37\x62\x32\x31\x22\x2c\x0a\x22\x61\x56\
\x20\x63\x20\x23\x37\x62\x37\x62\x32\x39\x22\x2c\x0a\x22\x61\x51\
\x20\x63\x20\x23\x37\x62\x37\x62\x35\x61\x22\x2c\x0a\x22\x2e\x4a\
\x20\x63\x20\x23\x37\x62\x37\x62\x37\x33\x22\x2c\x0a\x22\x2e\x31\
\x20\x63\x20\x23\x37\x62\x37\x62\x37\x62\x22\x2c\x0a\x22\x62\x23\
\x20\x63\x20\x23\x37\x62\x38\x34\x33\x31\x22\x2c\x0a\x22\x61\x33\
\x20\x63\x20\x23\x37\x62\x38\x34\x34\x61\x22\x2c\x0a\x22\x2e\x38\
\x20\x63\x20\x23\x37\x62\x38\x34\x37\x62\x22\x2c\x0a\x22\x61\x79\
\x20\x63\x20\x23\x38\x34\x37\x33\x30\x38\x22\x2c\x0a\x22\x23\x35\
\x20\x63\x20\x23\x38\x34\x37\x33\x31\x30\x22\x2c\x0a\x22\x61\x54\
\x20\x63\x20\x23\x38\x34\x37\x62\x32\x31\x22\x2c\x0a\x22\x62\x2e\
\x20\x63\x20\x23\x38\x34\x37\x62\x32\x39\x22\x2c\x0a\x22\x2e\x53\
\x20\x63\x20\x23\x38\x34\x37\x62\x37\x33\x22\x2c\x0a\x22\x2e\x64\
\x20\x63\x20\x23\x38\x34\x37\x62\x37\x62\x22\x2c\x0a\x22\x61\x57\
\x20\x63\x20\x23\x38\x34\x38\x34\x32\x31\x22\x2c\x0a\x22\x61\x31\
\x20\x63\x20\x23\x38\x34\x38\x34\x32\x39\x22\x2c\x0a\x22\x61\x30\
\x20\x63\x20\x23\x38\x34\x38\x34\x33\x31\x22\x2c\x0a\x22\x62\x71\
\x20\x63\x20\x23\x38\x34\x38\x34\x34\x61\x22\x2c\x0a\x22\x2e\x48\
\x20\x63\x20\x23\x38\x34\x38\x34\x38\x34\x22\x2c\x0a\x22\x61\x65\
\x20\x63\x20\x23\x38\x63\x37\x62\x30\x38\x22\x2c\x0a\x22\x61\x6b\
\x20\x63\x20\x23\x38\x63\x37\x62\x31\x30\x22\x2c\x0a\x22\x23\x6a\
\x20\x63\x20\x23\x38\x63\x38\x34\x38\x34\x22\x2c\x0a\x22\x62\x61\
\x20\x63\x20\x23\x38\x63\x38\x63\x36\x33\x22\x2c\x0a\x22\x61\x32\
\x20\x63\x20\x23\x38\x63\x38\x63\x37\x62\x22\x2c\x0a\x22\x2e\x78\
\x20\x63\x20\x23\x38\x63\x38\x63\x38\x34\x22\x2c\x0a\x22\x2e\x32\
\x20\x63\x20\x23\x38\x63\x39\x34\x38\x63\x22\x2c\x0a\x22\x2e\x39\
\x20\x63\x20\x23\x38\x63\x61\x64\x61\x35\x22\x2c\x0a\x22\x61\x43\
\x20\x63\x20\x23\x39\x34\x37\x62\x30\x38\x22\x2c\x0a\x22\x2e\x59\
\x20\x63\x20\x23\x39\x34\x37\x62\x37\x33\x22\x2c\x0a\x22\x61\x4b\
\x20\x63\x20\x23\x39\x34\x38\x63\x31\x30\x22\x2c\x0a\x22\x23\x51\
\x20\x63\x20\x23\x39\x34\x38\x63\x31\x38\x22\x2c\x0a\x22\x61\x36\
\x20\x63\x20\x23\x39\x34\x38\x63\x32\x39\x22\x2c\x0a\x22\x23\x6d\
\x20\x63\x20\x23\x39\x34\x38\x63\x38\x34\x22\x2c\x0a\x22\x62\x70\
\x20\x63\x20\x23\x39\x34\x39\x34\x34\x32\x22\x2c\x0a\x22\x61\x48\
\x20\x63\x20\x23\x39\x34\x39\x34\x38\x34\x22\x2c\x0a\x22\x2e\x44\
\x20\x63\x20\x23\x39\x34\x39\x34\x38\x63\x22\x2c\x0a\x22\x2e\x6e\
\x20\x63\x20\x23\x39\x34\x39\x34\x39\x34\x22\x2c\x0a\x22\x62\x6e\
\x20\x63\x20\x23\x39\x34\x39\x63\x34\x61\x22\x2c\x0a\x22\x62\x6c\
\x20\x63\x20\x23\x39\x34\x39\x63\x35\x61\x22\x2c\x0a\x22\x62\x6a\
\x20\x63\x20\x23\x39\x34\x39\x63\x36\x33\x22\x2c\x0a\x22\x2e\x75\
\x20\x63\x20\x23\x39\x34\x39\x63\x39\x34\x22\x2c\x0a\x22\x2e\x76\
\x20\x63\x20\x23\x39\x34\x61\x35\x39\x63\x22\x2c\x0a\x22\x61\x49\
\x20\x63\x20\x23\x39\x63\x38\x63\x31\x30\x22\x2c\x0a\x22\x23\x34\
\x20\x63\x20\x23\x39\x63\x38\x63\x31\x38\x22\x2c\x0a\x22\x23\x4f\
\x20\x63\x20\x23\x39\x63\x38\x63\x32\x39\x22\x2c\x0a\x22\x61\x4d\
\x20\x63\x20\x23\x39\x63\x39\x34\x31\x38\x22\x2c\x0a\x22\x23\x72\
\x20\x63\x20\x23\x39\x63\x39\x34\x38\x63\x22\x2c\x0a\x22\x62\x6f\
\x20\x63\x20\x23\x39\x63\x39\x63\x34\x61\x22\x2c\x0a\x22\x62\x6d\
\x20\x63\x20\x23\x39\x63\x39\x63\x35\x32\x22\x2c\x0a\x22\x62\x6b\
\x20\x63\x20\x23\x39\x63\x39\x63\x35\x61\x22\x2c\x0a\x22\x62\x68\
\x20\x63\x20\x23\x39\x63\x39\x63\x36\x62\x22\x2c\x0a\x22\x2e\x6b\
\x20\x63\x20\x23\x39\x63\x39\x63\x39\x34\x22\x2c\x0a\x22\x2e\x61\
\x20\x63\x20\x23\x39\x63\x39\x63\x39\x63\x22\x2c\x0a\x22\x62\x69\
\x20\x63\x20\x23\x39\x63\x61\x35\x36\x62\x22\x2c\x0a\x22\x62\x66\
\x20\x63\x20\x23\x39\x63\x61\x35\x37\x33\x22\x2c\x0a\x22\x2e\x68\
\x20\x63\x20\x23\x39\x63\x61\x35\x39\x63\x22\x2c\x0a\x22\x23\x66\
\x20\x63\x20\x23\x39\x63\x61\x35\x61\x35\x22\x2c\x0a\x22\x2e\x42\
\x20\x63\x20\x23\x39\x63\x62\x64\x62\x35\x22\x2c\x0a\x22\x23\x50\
\x20\x63\x20\x23\x61\x35\x39\x34\x30\x30\x22\x2c\x0a\x22\x61\x42\
\x20\x63\x20\x23\x61\x35\x39\x34\x31\x38\x22\x2c\x0a\x22\x23\x6b\
\x20\x63\x20\x23\x61\x35\x39\x34\x38\x63\x22\x2c\x0a\x22\x61\x72\
\x20\x63\x20\x23\x61\x35\x39\x63\x32\x39\x22\x2c\x0a\x22\x2e\x4e\
\x20\x63\x20\x23\x61\x35\x39\x63\x38\x63\x22\x2c\x0a\x22\x2e\x50\
\x20\x63\x20\x23\x61\x35\x39\x63\x39\x34\x22\x2c\x0a\x22\x2e\x57\
\x20\x63\x20\x23\x61\x35\x39\x63\x39\x63\x22\x2c\x0a\x22\x62\x67\
\x20\x63\x20\x23\x61\x35\x61\x35\x37\x33\x22\x2c\x0a\x22\x62\x64\
\x20\x63\x20\x23\x61\x35\x61\x35\x38\x34\x22\x2c\x0a\x22\x2e\x71\
\x20\x63\x20\x23\x61\x35\x61\x35\x39\x63\x22\x2c\x0a\x22\x2e\x63\
\x20\x63\x20\x23\x61\x35\x61\x35\x61\x35\x22\x2c\x0a\x22\x62\x65\
\x20\x63\x20\x23\x61\x35\x61\x64\x37\x62\x22\x2c\x0a\x22\x2e\x6f\
\x20\x63\x20\x23\x61\x35\x61\x64\x61\x35\x22\x2c\x0a\x22\x2e\x43\
\x20\x63\x20\x23\x61\x35\x61\x64\x61\x64\x22\x2c\x0a\x22\x2e\x49\
\x20\x63\x20\x23\x61\x35\x62\x35\x61\x64\x22\x2c\x0a\x22\x23\x64\
\x20\x63\x20\x23\x61\x35\x62\x64\x62\x35\x22\x2c\x0a\x22\x2e\x77\
\x20\x63\x20\x23\x61\x35\x63\x36\x62\x64\x22\x2c\x0a\x22\x61\x64\
\x20\x63\x20\x23\x61\x64\x39\x63\x31\x30\x22\x2c\x0a\x22\x61\x6a\
\x20\x63\x20\x23\x61\x64\x39\x63\x31\x38\x22\x2c\x0a\x22\x23\x77\
\x20\x63\x20\x23\x61\x64\x39\x63\x39\x34\x22\x2c\x0a\x22\x2e\x66\
\x20\x63\x20\x23\x61\x64\x61\x35\x39\x63\x22\x2c\x0a\x22\x62\x63\
\x20\x63\x20\x23\x61\x64\x61\x64\x37\x62\x22\x2c\x0a\x22\x62\x62\
\x20\x63\x20\x23\x61\x64\x61\x64\x38\x34\x22\x2c\x0a\x22\x2e\x23\
\x20\x63\x20\x23\x61\x64\x61\x64\x61\x35\x22\x2c\x0a\x22\x2e\x74\
\x20\x63\x20\x23\x61\x64\x61\x64\x61\x64\x22\x2c\x0a\x22\x2e\x34\
\x20\x63\x20\x23\x61\x64\x62\x35\x61\x64\x22\x2c\x0a\x22\x61\x69\
\x20\x63\x20\x23\x62\x35\x61\x35\x31\x38\x22\x2c\x0a\x22\x61\x71\
\x20\x63\x20\x23\x62\x35\x61\x35\x32\x39\x22\x2c\x0a\x22\x61\x44\
\x20\x63\x20\x23\x62\x35\x61\x35\x34\x32\x22\x2c\x0a\x22\x61\x6f\
\x20\x63\x20\x23\x62\x35\x61\x35\x34\x61\x22\x2c\x0a\x22\x2e\x4f\
\x20\x63\x20\x23\x62\x35\x61\x35\x39\x34\x22\x2c\x0a\x22\x23\x76\
\x20\x63\x20\x23\x62\x35\x61\x35\x39\x63\x22\x2c\x0a\x22\x61\x74\
\x20\x63\x20\x23\x62\x35\x61\x64\x35\x32\x22\x2c\x0a\x22\x23\x7a\
\x20\x63\x20\x23\x62\x35\x61\x64\x39\x63\x22\x2c\x0a\x22\x2e\x35\
\x20\x63\x20\x23\x62\x35\x61\x64\x61\x35\x22\x2c\x0a\x22\x2e\x6a\
\x20\x63\x20\x23\x62\x35\x62\x35\x61\x64\x22\x2c\x0a\x22\x2e\x65\
\x20\x63\x20\x23\x62\x35\x62\x35\x62\x35\x22\x2c\x0a\x22\x2e\x33\
\x20\x63\x20\x23\x62\x35\x62\x64\x62\x35\x22\x2c\x0a\x22\x2e\x51\
\x20\x63\x20\x23\x62\x35\x62\x64\x62\x64\x22\x2c\x0a\x22\x23\x65\
\x20\x63\x20\x23\x62\x35\x63\x65\x63\x36\x22\x2c\x0a\x22\x23\x38\
\x20\x63\x20\x23\x62\x64\x61\x35\x33\x39\x22\x2c\x0a\x22\x23\x78\
\x20\x63\x20\x23\x62\x64\x61\x35\x39\x63\x22\x2c\x0a\x22\x23\x33\
\x20\x63\x20\x23\x62\x64\x61\x64\x32\x39\x22\x2c\x0a\x22\x23\x58\
\x20\x63\x20\x23\x62\x64\x61\x64\x33\x31\x22\x2c\x0a\x22\x61\x67\
\x20\x63\x20\x23\x62\x64\x61\x64\x34\x32\x22\x2c\x0a\x22\x23\x6e\
\x20\x63\x20\x23\x62\x64\x62\x35\x61\x64\x22\x2c\x0a\x22\x2e\x62\
\x20\x63\x20\x23\x62\x64\x62\x64\x62\x64\x22\x2c\x0a\x22\x2e\x58\
\x20\x63\x20\x23\x62\x64\x63\x65\x63\x36\x22\x2c\x0a\x22\x61\x55\
\x20\x63\x20\x23\x63\x36\x62\x35\x31\x30\x22\x2c\x0a\x22\x23\x2e\
\x20\x63\x20\x23\x63\x36\x63\x36\x62\x64\x22\x2c\x0a\x22\x2e\x70\
\x20\x63\x20\x23\x63\x36\x63\x65\x63\x65\x22\x2c\x0a\x22\x2e\x67\
\x20\x63\x20\x23\x63\x36\x64\x65\x64\x36\x22\x2c\x0a\x22\x61\x63\
\x20\x63\x20\x23\x63\x65\x62\x64\x33\x31\x22\x2c\x0a\x22\x23\x6c\
\x20\x63\x20\x23\x63\x65\x64\x36\x63\x65\x22\x2c\x0a\x22\x23\x73\
\x20\x63\x20\x23\x63\x65\x64\x36\x64\x36\x22\x2c\x0a\x22\x23\x74\
\x20\x63\x20\x23\x63\x65\x64\x65\x64\x65\x22\x2c\x0a\x22\x23\x39\
\x20\x63\x20\x23\x64\x36\x63\x36\x31\x30\x22\x2c\x0a\x22\x23\x32\
\x20\x63\x20\x23\x64\x36\x63\x36\x33\x31\x22\x2c\x0a\x22\x61\x70\
\x20\x63\x20\x23\x64\x65\x63\x36\x30\x38\x22\x2c\x0a\x22\x61\x68\
\x20\x63\x20\x23\x64\x65\x63\x65\x31\x30\x22\x2c\x0a\x22\x61\x62\
\x20\x63\x20\x23\x64\x65\x63\x65\x33\x31\x22\x2c\x0a\x22\x2e\x6c\
\x20\x63\x20\x23\x64\x65\x65\x66\x65\x37\x22\x2c\x0a\x22\x2e\x52\
\x20\x63\x20\x23\x64\x65\x65\x66\x65\x66\x22\x2c\x0a\x22\x61\x75\
\x20\x63\x20\x23\x65\x37\x63\x65\x30\x38\x22\x2c\x0a\x22\x61\x2e\
\x20\x63\x20\x23\x65\x37\x64\x36\x32\x31\x22\x2c\x0a\x22\x61\x23\
\x20\x63\x20\x23\x65\x37\x64\x36\x33\x31\x22\x2c\x0a\x22\x61\x61\
\x20\x63\x20\x23\x65\x37\x64\x36\x33\x39\x22\x2c\x0a\x22\x23\x31\
\x20\x63\x20\x23\x65\x37\x64\x65\x33\x39\x22\x2c\x0a\x22\x23\x59\
\x20\x63\x20\x23\x65\x66\x64\x65\x31\x30\x22\x2c\x0a\x22\x61\x76\
\x20\x63\x20\x23\x65\x66\x64\x65\x32\x31\x22\x2c\x0a\x22\x61\x41\
\x20\x63\x20\x23\x65\x66\x64\x65\x32\x39\x22\x2c\x0a\x22\x23\x5a\
\x20\x63\x20\x23\x66\x37\x65\x37\x32\x39\x22\x2c\x0a\x22\x23\x30\
\x20\x63\x20\x23\x66\x37\x65\x37\x33\x39\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x23\x2e\x61\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\
\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x62\
\x2e\x63\x2e\x64\x2e\x65\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x2e\x66\x2e\x67\x2e\x68\x2e\x69\x2e\x62\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x2e\x6a\x2e\x6b\x2e\x6c\x2e\x65\x2e\x6d\x2e\x6e\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x2e\x62\x2e\x61\x2e\x6f\x2e\x70\x2e\x71\
\x2e\x72\x2e\x73\x2e\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\
\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x75\x2e\x76\x2e\x77\
\x2e\x6b\x2e\x78\x2e\x79\x2e\x7a\x2e\x41\x2e\x62\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x74\x2e\x6b\
\x2e\x42\x2e\x43\x2e\x6b\x2e\x44\x2e\x45\x2e\x46\x2e\x47\x2e\x48\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x62\
\x2e\x23\x2e\x23\x2e\x6f\x2e\x49\x2e\x62\x2e\x63\x2e\x4a\x2e\x4b\
\x2e\x4c\x2e\x4d\x2e\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x2e\x61\x2e\x4e\x2e\x4f\x2e\x50\x2e\x51\x2e\x52\x2e\x6f\
\x2e\x53\x2e\x54\x2e\x55\x2e\x56\x2e\x73\x2e\x62\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\
\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x2e\x74\x2e\x4e\x2e\x50\x2e\x57\x2e\x6b\x2e\x58\
\x2e\x52\x2e\x50\x2e\x59\x2e\x69\x2e\x41\x2e\x5a\x2e\x30\x2e\x31\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x2e\x62\x2e\x78\x2e\x32\x2e\x77\x2e\x33\
\x2e\x34\x2e\x49\x2e\x6a\x2e\x23\x2e\x35\x2e\x36\x2e\x4c\x2e\x46\
\x2e\x37\x2e\x30\x2e\x63\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x6e\x2e\x38\x2e\x39\
\x2e\x70\x23\x2e\x2e\x6f\x2e\x34\x2e\x78\x2e\x78\x2e\x78\x23\x23\
\x2e\x73\x23\x61\x23\x62\x2e\x73\x23\x63\x2e\x62\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x74\x2e\x44\
\x2e\x23\x23\x64\x23\x65\x2e\x68\x23\x66\x2e\x6f\x2e\x50\x2e\x32\
\x2e\x32\x23\x67\x23\x23\x23\x68\x2e\x41\x2e\x7a\x23\x69\x2e\x4b\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\
\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x62\
\x23\x6a\x2e\x50\x2e\x66\x2e\x50\x23\x6b\x2e\x71\x2e\x34\x2e\x58\
\x23\x6c\x23\x6d\x23\x6e\x2e\x6e\x23\x6f\x2e\x4a\x2e\x36\x23\x70\
\x23\x69\x23\x71\x2e\x63\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x2e\x57\x23\x6d\x23\x6b\x2e\x4f\x23\x6d\x23\x72\x2e\x35\
\x2e\x6b\x23\x73\x23\x74\x23\x6d\x2e\x71\x23\x72\x2e\x55\x2e\x45\
\x2e\x54\x23\x70\x23\x75\x23\x69\x2e\x4d\x2e\x65\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x2e\x62\x2e\x78\x23\x72\x2e\x6f\x2e\x74\x23\x76\
\x2e\x66\x2e\x6f\x2e\x34\x2e\x6c\x23\x6c\x23\x77\x23\x78\x2e\x66\
\x2e\x69\x23\x6f\x2e\x45\x23\x23\x23\x61\x2e\x6d\x23\x79\x2e\x6e\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x62\x2e\x65\x2e\x63\x23\x64\
\x2e\x6f\x23\x7a\x2e\x75\x2e\x71\x23\x66\x2e\x49\x2e\x71\x2e\x23\
\x2e\x23\x2e\x32\x23\x41\x2e\x55\x2e\x6d\x23\x42\x23\x68\x2e\x68\
\x2e\x6a\x2e\x62\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\
\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x2e\x62\x23\x43\x23\x44\x23\x45\x23\x46\x23\x47\x23\x47\
\x23\x47\x23\x48\x23\x49\x23\x4a\x23\x4b\x23\x4c\x23\x4d\x23\x4e\
\x2e\x69\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x2e\x62\x23\x4f\x23\x50\x23\x51\x23\x52\
\x23\x53\x23\x54\x23\x55\x23\x4c\x23\x4d\x23\x56\x23\x57\x23\x4e\
\x23\x56\x23\x56\x2e\x69\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x62\x23\x58\x23\x59\
\x23\x5a\x23\x30\x23\x30\x23\x31\x23\x32\x23\x33\x23\x34\x23\x35\
\x23\x36\x23\x37\x23\x4d\x23\x56\x23\x41\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x62\
\x23\x38\x23\x39\x61\x2e\x61\x23\x61\x61\x61\x61\x61\x62\x61\x63\
\x23\x33\x61\x64\x61\x65\x61\x66\x23\x37\x23\x4d\x2e\x4a\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\
\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x2e\x62\x61\x67\x61\x68\x61\x2e\x61\x23\x23\x31\x61\x61\
\x61\x61\x23\x32\x61\x69\x61\x6a\x61\x6b\x61\x6c\x61\x6d\x61\x6e\
\x23\x41\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x61\x6f\x61\x70\x61\x2e\x61\x23\
\x61\x61\x61\x71\x61\x72\x61\x71\x23\x33\x61\x6a\x23\x35\x61\x66\
\x61\x6d\x61\x73\x2e\x4a\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x61\x74\x61\x75\
\x61\x76\x61\x23\x61\x77\x23\x56\x23\x57\x23\x4d\x61\x78\x23\x34\
\x61\x79\x61\x66\x61\x7a\x61\x6e\x2e\x4a\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x61\x74\x61\x68\x61\x41\x61\x42\x23\x57\x23\x4e\x23\x4d\x23\x56\
\x23\x56\x23\x52\x61\x43\x61\x6c\x61\x7a\x61\x73\x2e\x4a\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\
\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x2e\x62\x61\x44\x61\x70\x23\x5a\x23\x51\x23\x4e\x23\x4d\
\x23\x56\x23\x4d\x23\x4e\x23\x36\x61\x6b\x61\x66\x61\x45\x61\x46\
\x61\x47\x2e\x45\x23\x68\x2e\x4a\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x61\x48\x61\x49\x61\x68\x61\x41\x61\x42\
\x23\x57\x23\x57\x23\x4e\x23\x57\x61\x4a\x23\x36\x61\x4b\x61\x6c\
\x61\x6d\x61\x46\x23\x57\x61\x4a\x61\x4a\x23\x79\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x71\x61\x4c\x61\x49\x61\x68\
\x61\x76\x61\x4d\x23\x57\x61\x4a\x23\x57\x23\x44\x61\x4e\x23\x52\
\x61\x65\x61\x6c\x61\x4f\x61\x6e\x23\x4e\x61\x4e\x61\x50\x61\x51\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x6a\x61\x52\x61\x53\
\x61\x54\x61\x55\x61\x76\x61\x42\x23\x57\x61\x4e\x61\x50\x61\x4c\
\x61\x56\x61\x57\x61\x65\x61\x58\x23\x36\x61\x59\x61\x5a\x61\x30\
\x61\x31\x61\x32\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x2c\
\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x2e\x62\x61\x33\
\x61\x34\x61\x35\x61\x35\x61\x56\x61\x36\x61\x31\x61\x5a\x61\x37\
\x61\x53\x61\x53\x61\x35\x61\x38\x61\x39\x61\x34\x61\x35\x62\x2e\
\x62\x23\x61\x35\x61\x35\x2e\x6b\x51\x74\x51\x74\x51\x74\x51\x74\
\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\
\x62\x61\x61\x37\x61\x37\x61\x56\x61\x30\x62\x2e\x62\x23\x61\x37\
\x62\x23\x61\x30\x61\x37\x61\x37\x61\x37\x61\x53\x61\x53\x61\x37\
\x62\x2e\x61\x56\x61\x30\x61\x56\x61\x52\x2e\x23\x51\x74\x51\x74\
\x51\x74\x51\x74\x51\x74\x22\x2c\x0a\x22\x51\x74\x51\x74\x51\x74\
\x51\x74\x51\x74\x62\x62\x62\x63\x62\x64\x62\x65\x62\x65\x62\x66\
\x62\x67\x62\x68\x62\x68\x62\x69\x62\x6a\x62\x6a\x62\x6b\x62\x6b\
\x62\x6c\x62\x6d\x62\x6d\x62\x6e\x62\x6f\x62\x70\x62\x71\x2e\x62\
\x51\x74\x51\x74\x51\x74\x51\x74\x51\x74\x22\x7d\x3b\x0a\
\x00\x00\x05\x19\
\x00\
\x00\x11\x8f\x78\x9c\xe5\x96\x5b\x4f\x23\x47\x10\x85\xdf\xf9\x15\
\xd6\xd6\xdb\x2a\xaa\xc5\xe3\xb9\x2a\xca\x03\x77\x03\xcb\xc5\x5c\
\x8c\x21\xca\x43\xcf\x8c\x6f\x80\xcd\xc5\x06\x83\xa3\xfc\xf7\x54\
\xf7\xa9\xee\x45\xbb\xfb\xb0\x6c\xa4\x48\x51\x54\x80\xf8\x5c\x5d\
\xa7\x4e\xd7\xf4\x8c\xe7\xd3\xc7\x46\xef\xf8\xa0\xf1\xf1\xd3\xca\
\x6c\x6e\xe6\xe3\xaa\x51\x8d\xcc\x63\xe3\xe3\x78\x62\x86\xfd\xe8\
\xf7\x3f\x7e\xfb\x73\xe5\x43\x2b\x6a\xc8\x4f\x33\x8e\x1a\xd1\x87\
\x5f\x56\x3e\x74\xe6\x8d\xaa\x71\x78\x37\xed\x5b\xa0\x4b\x01\x5a\
\x5d\x6d\xe6\xfd\xcc\xb2\x31\x81\x07\x2e\xbf\xe5\x79\x00\xee\x80\
\xa3\xa6\xe6\xf7\x3d\x23\x6f\x48\xb9\x50\x66\xc7\xb9\x67\x3a\x06\
\xb7\x74\x3d\xc5\x96\x9b\x52\x5f\xa7\x8e\x4f\x02\xc3\xdf\x67\xe5\
\x42\xf3\x5d\xcf\xf0\x4b\x53\x70\xab\x18\x80\x2f\x02\x43\xff\x08\
\x1c\x47\xea\x67\xe4\x58\xfa\x57\xd0\x7f\x05\x87\xfc\xb5\xe5\xa8\
\x29\x79\xf4\x3b\x0d\x8c\xf5\x4b\xe5\x02\xfe\x78\x15\x1c\x47\x60\
\x33\xf6\x0c\x7f\x3c\x57\x36\x3a\xaf\x4d\xcf\xe8\xc7\x2f\xe0\xc4\
\xf7\xbf\x75\x5c\x78\x7d\xaa\xc1\x52\x8f\xfd\xed\x82\xfd\x7a\xda\
\x53\x56\x3d\x3a\xb3\xdc\x92\xfe\x65\xed\xf8\x10\x1c\xf2\x33\x70\
\xda\x52\xde\x76\x2c\xfa\xba\xdf\x1d\x70\x12\x61\xde\x66\xa8\xac\
\xfd\x79\x1d\x2c\xf5\x38\x2f\xfd\xc0\xd8\x8f\xae\x4f\x4b\xd5\x3f\
\xb7\x1c\x47\xb1\x29\x13\xc7\x11\x38\x89\xc0\xbc\xa7\x6c\x74\xbe\
\x6b\xe0\xb4\xa5\xf3\x2d\x95\x4b\xdd\xff\x7d\x60\xe8\xbf\x78\xd6\
\xfe\x53\x70\xe6\xfd\xdc\x39\x36\x69\x4b\xf5\xe7\xca\xaa\xc7\x1d\
\x70\xa6\xfd\xa8\xe5\x59\xfd\x6f\x28\xfb\xfd\x5c\x59\x4e\xc4\x2f\
\xe6\xcb\x4d\x70\x5a\x2a\x5f\x78\xc6\x3c\xf9\x24\x30\xf6\x73\x0c\
\xce\xfc\x7e\x16\xe0\x3c\x56\xfd\x89\x72\xa5\xe7\xa1\x72\x2c\xfe\
\xa1\x6f\x6e\x94\x7d\xbf\x23\xb0\xf8\x83\x5e\x1b\x1c\xf4\x0a\x65\
\xaf\x37\xb0\x9c\xb6\xa4\x1e\xf3\x1f\x05\x86\x7e\x09\xce\x63\x9d\
\xef\xb3\xb2\xd6\xf3\x02\x5c\xa8\x3e\xcf\x94\x35\x4f\x07\x8e\xcb\
\xac\xa5\xfa\x1b\x81\x71\x1e\x9f\xc0\x7e\x3d\xbb\xfd\x65\xad\xcc\
\xfb\x39\x05\xe7\xb1\xce\xeb\x50\xb9\xd2\xeb\x65\xc0\x45\xac\xe7\
\x23\xf7\xac\x7e\x3d\x57\x3a\x8f\xf5\xc0\xe8\xb7\x06\x36\x89\xfa\
\x75\xf7\x6f\x56\xe6\xb1\xf6\x3f\x53\xae\x74\xbe\x05\xb8\x88\xf5\
\xfe\x60\xcf\xb8\x3f\x38\x03\x7b\x3d\xee\x29\xd7\xca\xbb\xe0\xd2\
\xf7\x4b\x2c\xe7\x71\x5e\x19\xf4\x7b\x0d\x8c\x7e\x07\xe0\x42\xfd\
\x10\x29\x57\xfa\xfc\xab\xc0\x41\x7f\x07\xec\xf5\xf9\x4a\x59\xf3\
\xe4\xce\x7f\x5e\x79\x7d\x32\x60\x93\xe8\x7c\x2f\xc1\xa1\xbe\xad\
\xec\xeb\xdd\xf9\x2e\xc4\x8f\xfa\x3d\x0f\x0c\xbd\x4c\xd9\xeb\x97\
\x60\x53\xeb\xf5\x5a\x82\xab\x54\xf5\x9f\x95\xfb\xca\xee\x79\x5b\
\x54\xbe\x9e\x9f\xc0\x26\x51\xbd\x47\x70\xa8\x9f\x28\xfb\x7a\xf7\
\x7c\x94\xd5\x09\xfc\x51\x1a\x18\x7a\x5d\xcf\x7a\x7d\xf7\x95\x6b\
\xe5\x08\x1c\xf4\x1e\xc1\xb5\xef\xe7\xbe\x7f\x4c\x6d\xc3\xf1\xd8\
\xb3\xd6\xdf\x07\x46\xfe\x01\x5c\xab\x1e\xf5\xc1\xfd\x4c\xd9\x9d\
\x8f\x32\x29\xbd\x9f\xcd\xc0\xa8\xdf\xf2\xac\xcf\x8f\x6d\xe5\x5a\
\x79\x00\x0e\xfa\x0f\x60\x41\xf8\x75\xe7\xa5\xac\x4b\xf5\x63\x6a\
\xcf\x5a\x9f\x82\x43\xfd\x18\x1c\xfc\x8d\x94\x55\x8f\xdc\xf3\x5c\
\xa6\xef\xf3\xee\xfb\xa9\xea\x87\xf5\xd7\xe0\x90\x1f\x28\x7b\x3f\
\xee\x79\x55\xa7\x41\x4f\xd9\xaf\xe7\x5b\x65\xbf\xde\xcd\xab\xee\
\x87\xfd\xb8\xfb\xc5\x76\x53\x76\xef\x0b\x4e\x1e\xec\x9e\xd7\x83\
\x90\xef\xcc\xff\x59\xfc\xff\x34\x98\xd8\x70\xc9\x15\xd3\x3f\xd0\
\xa8\xb9\xcf\x03\x1e\xf2\x88\xc7\x3f\xad\x71\xcd\x37\x7c\xcb\x13\
\x9e\xf2\x1d\xdf\xff\xb4\x8f\x01\x3f\xf0\xa3\xa8\xcc\x78\xce\xd5\
\x4f\x69\x3c\x89\xc2\xb3\x28\x58\x95\x05\xbf\xf0\xeb\xdb\xc9\xfc\
\xb0\xc6\x52\x63\x22\xb1\xc6\xeb\xbc\xc1\x9b\x5f\x6b\xc8\xe4\xb7\
\x78\x5b\xfe\x7e\x77\xf2\x32\x8f\x1d\x6e\x8b\x42\x5b\x55\x76\x65\
\x32\x7b\xbc\xff\x45\x83\x3f\xf3\x01\x1f\xf2\x11\x1f\x73\x87\x4f\
\xf8\x94\xcf\xf8\x5c\x14\x83\x1e\x77\xf9\x82\x7b\x52\x7f\x29\xbf\
\x6d\xf5\x73\x25\x57\x69\x95\xbb\x41\xa3\xc9\x11\xb7\x38\xe6\x44\
\xa6\x9f\xca\xff\x97\x9c\x71\x2e\x27\xa2\x10\x19\x22\x43\xa5\x7c\
\xb2\xa3\x71\xe9\x7e\x45\x89\x2a\x9e\x52\x2d\xbb\x24\xbc\xa3\x51\
\x9f\x06\x34\xa4\x11\x8d\x79\x20\x7f\x87\x12\xd7\x74\x63\x43\xce\
\x45\xcc\x31\xdd\x4a\x45\x2f\xe8\x20\xc4\x13\x4d\x68\x4a\x77\x78\
\xaf\xe3\x09\xe1\xca\xd9\x58\x92\xfd\x6f\xe9\xae\xc2\x83\x8b\x89\
\xac\xb7\x55\x3d\x51\x42\x7d\x4f\x23\xa3\x19\xef\xe1\xdd\x4e\xae\
\xdb\x83\xab\x6b\x87\xb8\x7a\xe3\x7f\x27\x28\xec\x3a\x3f\x36\xd6\
\x78\x8d\x9e\xe8\xc9\xee\x47\x35\xda\xd2\x73\x69\xab\xc2\x9a\x2c\
\xf4\xfa\x36\x32\x57\xff\x4c\x0b\x7a\xa1\x57\x5a\xe2\x1d\x56\xae\
\x95\xf5\xfb\xa5\xc3\xba\xfd\xb5\x7d\x5c\xac\x3b\x0f\xbd\xb7\x0e\
\x78\x41\x1b\x3c\xa4\x4d\xda\xa2\x6d\xbc\x87\xcb\x09\xde\x91\xdc\
\x42\xb4\xdb\x64\xb3\x53\xe9\x30\xa3\x5d\xe9\x62\x63\x8f\xda\xc1\
\x59\xa6\x0a\x0b\xf9\xf4\x95\xf6\xe9\x33\x9f\xe3\x5d\x4c\x4e\xef\
\x94\x0e\x45\xf5\x88\x8e\xe5\xf3\x7d\xea\x48\x9c\xd0\x29\x9d\xd1\
\xb9\x44\x97\x2e\xa4\x83\x3a\xb4\x0a\xb2\x7a\x53\x56\x6e\x89\x42\
\x17\xe7\x83\x7a\x3c\x92\x75\x1d\xba\x74\x75\xe7\x74\x45\xab\xd4\
\xe4\x7b\x7b\xca\x5c\x74\x29\x12\xa7\x99\x4e\x71\x41\x2d\x55\x88\
\x29\x79\x7b\xd6\x29\xa5\x26\x65\xbc\xf9\xfd\xd3\x2e\xd9\x1c\x0e\
\xa8\x90\x3d\xbe\x1a\xb6\x73\x90\xf3\x4b\xef\xb8\xe7\x88\x73\xa9\
\xb6\xb3\x9a\xd1\x85\x21\x63\x28\xa2\xde\x3b\xef\x5b\x32\xa5\x9d\
\xb5\xcc\xec\x42\xa6\xb5\x65\xaa\xf7\xdf\xfb\xa6\xe6\x63\xd3\x97\
\x2b\x21\x1e\xa8\x6b\x06\x5f\x3c\xbc\xcb\xc7\x50\x1c\xc8\x1c\xcd\
\x88\xd2\xaf\xb3\x3f\xac\x31\x76\x7b\xb8\xe6\xeb\x6f\xb3\x3f\xac\
\x71\x63\x6e\xcd\xcd\xf7\x9e\xea\xff\xb5\xef\xb9\x7f\x41\xe3\xaf\
\x5f\x57\xfe\x06\xf5\x23\xb8\x71\
\x00\x00\x00\xa7\
\x00\
\x00\x03\x23\x78\x9c\xd3\xd7\x52\x88\x08\xf0\x55\xd0\xd2\xe7\x2a\
\x2e\x49\x2c\xc9\x4c\x56\x48\xce\x48\x2c\x52\xd0\xca\xcc\x4d\x4c\
\x4f\x35\x8e\x8e\xb5\xad\xe6\x52\x32\x32\x55\x00\x21\x05\x43\x25\
\x1d\x2e\x25\x65\x85\x64\x05\x65\x03\x30\x00\x71\xf5\x40\xdc\x34\
\x30\x00\x73\x71\x01\x2a\x49\x2a\x83\x01\x56\x49\x65\x38\xc0\x94\
\x84\x89\x22\xc9\xc2\x25\x91\x34\xc0\x99\x30\x49\x14\xe3\x60\x4c\
\x84\x24\xaa\x5e\x64\x49\xb0\x04\x42\x16\xca\x82\x4b\xa2\xda\x8b\
\x45\x12\x21\x8b\x4d\x12\x2e\x8b\x55\x52\x0f\xee\x59\x74\x07\x21\
\x64\x51\x1d\xa4\x87\xea\x49\x0c\x7f\x22\xcb\xa2\x49\xe2\x0d\x3e\
\xfc\x01\x8f\x3f\xca\xe0\xd2\x70\x2e\xb5\xd2\x10\x69\x92\xb5\xd6\
\x5c\x00\xe0\x4d\x89\x7e\
\x00\x00\x00\xad\
\x00\
\x00\x04\xda\x78\x9c\xd3\xd7\x52\x88\x08\xf0\x55\xd0\xd2\xe7\x2a\
\x2e\x49\x2c\xc9\x4c\x56\x48\xce\x48\x2c\x52\xd0\xca\xcc\x4d\x4c\
\x4f\x35\x88\x8e\xb5\xad\xe6\x52\x32\x36\x52\x00\x21\x05\x43\x25\
\x1d\x2e\x25\x3d\x85\x64\x05\xbf\xfc\xbc\x54\x10\x3b\x11\xc8\x56\
\x36\x00\x02\x0b\x03\x10\x57\x19\xc4\xb5\x30\xb0\x80\x72\xf5\x08\
\x80\x51\x35\x60\xa0\x9c\x08\x03\x38\xd5\x20\x94\x20\x29\x4a\x44\
\x51\xa3\x8c\x90\x42\x66\x21\xab\x51\x46\xd1\x0d\xe5\x24\xa2\xaa\
\x41\x75\x05\x44\x51\x22\x86\x1a\x54\x77\x22\xbb\x0c\x97\x1a\x3d\
\x2a\xa9\x21\xc2\xae\x44\x6c\x6a\x94\x31\xfc\x85\xa9\x06\x45\x51\
\x22\x0e\x35\x40\x09\x65\x3d\xec\x00\x29\x4e\x71\x2a\x1a\xc4\xe9\
\x70\x78\xa9\xa9\xb5\xe6\x02\x00\x3d\x48\xf5\x6a\
\x00\x00\x05\x1a\
\x00\
\x00\x11\x8f\x78\x9c\xe5\x96\x49\x73\x32\x47\x0c\x86\xef\xfe\x15\
\x94\x75\x73\xa5\xf4\xc1\xcc\x30\xcc\x54\x2a\x07\xef\x60\xbc\x80\
\x8d\xd7\x54\x0e\x3d\x1b\x06\xbc\xb1\x18\x2f\xa9\xfc\xf7\xa8\x5b\
\x52\xdb\xf9\xca\x17\x93\x5b\x52\x32\x86\x07\x49\xaf\xa4\xee\x9e\
\x19\x7e\x6c\xd4\xae\x7a\x47\xb5\x8d\x1f\x6b\xf3\x85\x59\x8c\xf2\
\x5a\x7e\x6b\x66\xb5\x8d\xd1\xbd\x19\x96\x8d\xdf\xff\xf8\xed\xcf\
\xb5\xf5\x30\xa8\xd1\x5f\x23\x0a\x6a\xc1\xfa\x2f\x6b\xeb\xfd\x45\
\x2d\xaf\x1d\x3f\x3e\x94\x16\xe0\x84\x00\xea\xf5\x46\xbd\x6a\x39\
\xee\x31\x07\x8d\xb2\x72\x7c\xaa\x2c\xfe\x33\xcf\xec\x1f\x08\xa7\
\xcc\xe6\x96\x39\x54\xff\xb1\xe3\x84\xfc\x9c\xff\xca\xec\xfd\xdb\
\xc2\x92\x0f\xbb\x96\x1b\xa4\x57\x72\xfc\x8c\xd9\xfb\x1f\x99\xa3\
\x40\xb8\xef\x98\xf4\x0a\x37\x8f\x99\x28\x4b\xbe\xb0\x8f\x6f\x0b\
\x1b\xe9\x77\x6c\x39\x68\x84\x5a\x6f\xce\x1c\x05\xac\x07\xfb\xc2\
\x12\x8f\x29\x73\x53\xf5\x02\xc7\x69\x98\xe6\xb1\xd3\x1b\x2a\x17\
\xcc\xc0\x1c\x05\x39\xeb\x3d\x29\x8b\xfe\x39\xb3\xea\x99\x52\x58\
\xea\xc1\x9b\xe5\x90\xea\x71\x3c\x06\xca\xd2\x6f\x47\x58\xe3\xdf\
\x99\xe3\x50\xb8\xe1\x38\x8d\x4c\x56\x38\x3e\x54\x96\xfa\x5b\xcc\
\x3e\xfe\x45\x38\x13\xbe\xb0\x1c\x05\x54\xdf\xcd\x03\x77\x9e\x39\
\xff\x92\x39\x0e\xe5\xbc\x4c\x85\x25\x1f\x1b\xcc\x2d\xd5\x6f\x3a\
\x36\x4d\x23\x7a\x07\xcc\xde\xdf\x15\x96\x7c\x93\x33\x27\x91\x70\
\x66\xb9\x49\xf5\x78\xbd\x71\xe0\x99\xd7\xe7\x88\xb9\x15\xb2\x3e\
\xe6\xca\xbc\x5e\x78\x2e\x9c\x49\xff\x0b\x65\x39\x9f\x9b\xcc\x5a\
\x0f\xae\x1c\x9b\x38\x34\x6e\xfd\xf0\x82\x39\x89\x64\xde\x3d\x65\
\x89\x5f\x0a\xe7\xd2\x6f\x61\x39\x0e\xe3\x8c\xd7\x1f\x4b\xe6\x56\
\x26\xfd\x5f\x7a\xe6\xfe\x43\x65\xe9\xbf\xcf\x9c\x44\xb2\xff\x4d\
\xe1\x5c\xea\x67\xca\x52\x7f\xcc\x9c\xea\x7a\x55\x8e\xb3\x56\x28\
\xfb\x5f\x57\x96\xfa\x67\xcc\x49\x24\xe7\xf3\x99\x59\xf3\x71\x5b\
\x58\xe7\x19\x59\x6e\x7d\xf4\xfb\xce\x9c\x44\x59\xd3\xf1\x58\x99\
\xf5\x21\x16\xce\x25\x3e\x52\x96\xf9\x32\xe6\x34\x97\xfd\xe9\x30\
\x9b\xa6\xcc\xf3\x20\x5c\x08\xdf\x38\xce\x7c\xbd\x53\xe1\x5c\xd6\
\xf7\x80\x39\x8d\x84\x39\x3f\x33\x4d\xd1\xef\x09\xab\x5e\xce\x9c\
\x69\x3d\x77\x7e\x92\x28\x89\xd2\xdc\x71\xa8\x2c\xfb\x3f\x12\xce\
\xe5\x7a\xbf\x63\xd6\x7a\xc6\x08\x8b\x1f\xdf\x98\xb5\x1e\x9e\x30\
\xfb\x7a\x12\x9f\xa9\xdf\x9d\xc7\x24\x4f\xb5\x9e\x51\x96\x79\x77\
\x84\x75\xde\x7b\x65\x59\xdf\x4d\x66\x9a\x97\xf5\x2a\xe1\x82\xcf\
\x37\xb6\x99\xb5\x1e\x38\xbd\x94\xfa\x37\x4e\x1f\x12\x65\x39\x2f\
\xa9\xb2\xcc\x8b\xc2\x5a\x7f\xc6\x9c\xc7\xa2\x57\x08\x97\xc2\x6e\
\xbf\xd3\xdc\x5e\xf0\x8e\x5b\xca\x32\xcf\x33\x73\xa6\xfd\xc5\xcc\
\xaa\x87\x75\x61\xd1\xc3\x57\xe6\x42\xfd\xee\x7e\x46\xea\xa2\x07\
\xd7\x9e\xb9\x5e\xc9\xec\xfb\x01\x66\x9f\xef\xae\x6f\x53\x98\x42\
\xfa\x99\x30\xfb\x79\x46\xcc\x3e\xde\x3d\x9f\xb2\x66\xa6\xfd\x17\
\x9e\x79\x3d\xba\xc2\x85\xec\xff\x1e\xb3\xef\xff\x98\xd9\xeb\x6d\
\x09\xab\xdf\x3d\x7f\xb2\xc2\x9a\x63\x50\x96\xeb\x09\x99\x7d\xfe\
\x3e\x73\xd9\x92\x7e\xdd\xf3\x37\x8f\x3d\x57\xc2\x95\xc4\xbf\x30\
\x57\xe2\x47\x8e\x2f\xbd\xde\x35\xb3\xe6\xe3\x54\x58\xf3\x6f\x98\
\x7d\xbe\x3b\xff\xc5\x87\x7e\xc2\xec\xfd\xee\x79\x5d\x7c\xe4\xb7\
\x98\xd5\x0f\xf7\xc2\xe2\x07\xf7\xfc\xb4\xd5\x25\xde\xdd\x4f\x9d\
\x9b\xd9\x3d\x3f\xab\x0f\xbf\x8b\xaf\xbc\xbf\xbf\xf8\x77\xf6\x7f\
\xd0\x40\x40\x83\x19\xe6\x68\x56\xd5\xc0\x02\x4b\xac\x70\x88\xb7\
\x38\x5a\x4d\x83\x14\xc6\x38\x21\x85\x21\xde\xe1\xfd\x2a\x1a\x34\
\xc3\x03\x3e\x52\xfe\x13\x4e\x71\xf6\x7d\x0d\x9c\xe3\x02\x9f\x5d\
\x07\x4b\x7c\xc1\x57\x7c\x5b\x41\xe3\x1d\x37\x45\x61\x8b\x14\x66\
\xb8\x8d\x3b\xdf\xd1\xc0\x5d\x9a\x62\x4f\x14\xf6\x29\xbf\x4d\xd6\
\xc1\x83\xaf\xb2\xa8\xdf\x39\x76\xad\x7d\xd6\xc0\x43\x3c\xc2\x63\
\xa7\x30\xa5\x0e\xda\x78\x42\x73\xf4\xc8\xfa\x78\xf8\x8f\x3c\x83\
\xa7\x78\x86\x03\x3c\xc7\x0b\xb2\x3e\x5e\xe2\x15\x6b\xd0\x69\xe8\
\xe3\xb5\x53\xb8\xc1\xba\xcf\xb7\xd6\xc0\x00\x43\x8c\x28\xaf\xa4\
\xd3\xd2\xa4\x1d\x8f\xa9\xd6\x1d\xb6\x30\x71\xb1\x29\x0e\xf8\x9e\
\x85\x97\x80\x6e\x27\x6e\x00\xc0\x90\x86\xcd\xee\x90\xf5\x20\x83\
\x1c\x0a\x28\x71\x0b\x2a\x18\xba\xcc\x04\xa7\x70\x0b\x23\x3b\x2d\
\x8c\x61\x02\x77\xfc\xbb\x88\x62\x86\x70\x8f\xfb\x50\x48\x0f\x9d\
\x4f\xd6\xc6\x98\x94\x81\x6a\xda\x2c\xaa\x00\x39\x76\xe0\x81\x5e\
\x63\x6c\xc0\x23\x3c\xf1\x6f\x23\x7c\x82\x8a\x56\xa1\xfe\x85\xc2\
\xd7\xb6\x6d\x8d\x7a\x9c\xc2\x0c\xe6\xfc\x7b\x90\xaa\xd8\x7d\xb0\
\x15\x7a\x3f\xc5\x72\xfc\xc7\x27\x9b\xfb\x4c\xaf\x31\xd9\x12\x5e\
\xe0\x95\xfb\x80\x37\x78\xa7\xbe\xdc\xfc\x1f\x55\xf4\x13\xc5\x66\
\x14\xbd\xa4\xff\x63\x67\x99\xcb\xb6\xdf\x6d\xc2\x16\x6c\x8b\xc6\
\x0e\xec\xd2\xda\x4c\xe9\xbb\xf1\x4f\x35\x3b\xb0\x07\xfb\xe4\x9b\
\x40\x9b\xde\x3b\xb0\x45\xfb\xd4\x80\x03\xe8\x42\x97\xde\xa7\x54\
\xfb\x15\x0e\x79\x5f\xe0\x08\x8e\x69\xb2\x77\xf2\x65\xae\xd3\x67\
\xed\x84\x2a\x9d\x40\x0f\xfa\x54\xa5\x07\xa7\x70\x06\x03\xca\x9a\
\x91\xb5\x31\x85\x73\xd2\x1d\xc0\x85\x3f\x63\xef\x4e\xa5\x43\xea\
\x4b\xea\xf5\xd9\x76\x44\x4a\x1d\xb8\x84\x2b\xd8\xa1\x2a\xd7\xd4\
\xed\x0d\xd4\xa1\x01\x01\x29\xf6\xc8\x8e\xc9\x06\xa4\x1d\x7a\x8d\
\x39\x44\xd0\xa4\x1a\x6d\xaa\xdc\x75\x3a\x6c\xf1\x17\xa7\xbc\xc0\
\x2b\x68\x41\x02\xa9\x41\xbc\xc2\xf9\xe7\x6b\x0e\x0b\x03\xd4\xed\
\x84\x66\x3a\x90\x35\x5c\x9a\x6f\xdf\xc7\x70\xd7\x64\xd4\xcb\x23\
\xad\x9e\xed\x65\x69\xf2\x55\xee\x63\x38\x37\x05\xcd\x38\x33\x25\
\xad\x7a\x17\x5e\x4c\xf5\x7d\x0d\x77\xfd\x1d\x9a\xa1\xb9\xa5\x99\
\xce\x61\xb2\x9a\x86\x35\x5a\xdf\xbe\xdd\x45\x73\x6b\x56\xbc\x27\
\xb3\x0a\xfd\x16\x1a\x9b\x89\xb9\x5b\x5d\xe3\x3b\xf6\x9f\xd2\xf8\
\xeb\xd7\xb5\xbf\x01\xda\x3f\xb9\x0e\
"
qt_resource_name = "\
\x00\x05\
\x00\x6f\xa6\x53\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x0a\
\x0d\x87\x59\x8d\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x36\x00\x2e\x00\x78\x00\x70\x00\x6d\
\x00\x0a\
\x0d\x89\x59\x8d\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x34\x00\x2e\x00\x78\x00\x70\x00\x6d\
\x00\x0a\
\x0d\x8a\x59\x8d\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x35\x00\x2e\x00\x78\x00\x70\x00\x6d\
\x00\x0a\
\x0d\x8b\x59\x8d\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x32\x00\x2e\x00\x78\x00\x70\x00\x6d\
\x00\x0a\
\x0d\x8c\x59\x8d\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x33\x00\x2e\x00\x78\x00\x70\x00\x6d\
\x00\x0a\
\x0d\x8d\x59\x8d\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x30\x00\x2e\x00\x78\x00\x70\x00\x6d\
\x00\x0a\
\x0d\x8e\x59\x8d\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x31\x00\x2e\x00\x78\x00\x70\x00\x6d\
"
qt_resource_struct = "\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x07\x00\x00\x00\x02\
\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x2a\x00\x01\x00\x00\x00\x01\x00\x00\x0e\xa3\
\x00\x00\x00\x44\x00\x00\x00\x00\x00\x01\x00\x00\x0f\x54\
\x00\x00\x00\x5e\x00\x01\x00\x00\x00\x01\x00\x00\x25\x37\
\x00\x00\x00\x78\x00\x01\x00\x00\x00\x01\x00\x00\x2a\x54\
\x00\x00\x00\x92\x00\x01\x00\x00\x00\x01\x00\x00\x2a\xff\
\x00\x00\x00\xac\x00\x01\x00\x00\x00\x01\x00\x00\x2b\xb0\
"
def qInitResources():
    """Register the embedded resource data with Qt's resource system.

    Makes the bundled images available under the ":/icons/..." resource
    paths. Version argument 0x01 is the resource-format version.
    """
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Unregister the embedded resource data from Qt's resource system."""
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
# Register the resources as a side effect of importing this module.
qInitResources()
| 61.924769
| 96
| 0.726445
| 12,894
| 53,503
| 3.012952
| 0.023344
| 0.10981
| 0.140158
| 0.18626
| 0.68079
| 0.646117
| 0.617854
| 0.578779
| 0.518623
| 0.45229
| 0
| 0.406816
| 0.01727
| 53,503
| 863
| 97
| 61.996524
| 0.332053
| 0.003383
| 0
| 0.116745
| 0
| 0.936321
| 0
| 0
| 0
| 1
| 0.00015
| 0
| 0
| 1
| 0.002358
| false
| 0
| 0.001179
| 0
| 0.003538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
c7b84430c730d6b0cc48bc3d044e98ae46b705b9
| 209
|
py
|
Python
|
mayan/apps/file_metadata/exceptions.py
|
atitaya1412/Mayan-EDMS
|
bda9302ba4b743e7d829ad118b8b836221888172
|
[
"Apache-2.0"
] | 343
|
2015-01-05T14:19:35.000Z
|
2018-12-10T19:07:48.000Z
|
mayan/apps/file_metadata/exceptions.py
|
atitaya1412/Mayan-EDMS
|
bda9302ba4b743e7d829ad118b8b836221888172
|
[
"Apache-2.0"
] | 191
|
2015-01-03T00:48:19.000Z
|
2018-11-30T09:10:25.000Z
|
mayan/apps/file_metadata/exceptions.py
|
atitaya1412/Mayan-EDMS
|
bda9302ba4b743e7d829ad118b8b836221888172
|
[
"Apache-2.0"
] | 257
|
2019-05-14T10:26:37.000Z
|
2022-03-30T03:37:36.000Z
|
class FileMetadataError(Exception):
    """Base exception for the file metadata app.

    Catching this also catches every more specific subclass, such as
    ``FileMetadataDriverError``.
    """
class FileMetadataDriverError(FileMetadataError):
    """Raised when a file metadata driver encounters an unexpected error."""
| 26.125
| 72
| 0.76555
| 20
| 209
| 8
| 0.75
| 0.325
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138756
| 209
| 7
| 73
| 29.857143
| 0.888889
| 0.473684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
c7d40a0415ec0ba72c67a524c1156040bca5a953
| 831
|
py
|
Python
|
utils/emojis.py
|
nigelbowden/wlanpi-chat-bot
|
cedab0e83d6e33d47f66c1a3be202564f95ac408
|
[
"MIT"
] | null | null | null |
utils/emojis.py
|
nigelbowden/wlanpi-chat-bot
|
cedab0e83d6e33d47f66c1a3be202564f95ac408
|
[
"MIT"
] | null | null | null |
utils/emojis.py
|
nigelbowden/wlanpi-chat-bot
|
cedab0e83d6e33d47f66c1a3be202564f95ac408
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
def cats(number=100):
    """Return *number* cat-face emoji, each followed by a space."""
    face = "\U0001F431 "
    return face * number
def green_circle(number=1):
    """Return *number* green-circle emoji, each followed by a space."""
    symbol = "\U0001F7E2 "
    return symbol * number
def blue_circle(number=1):
    """Return *number* blue-circle emoji, each followed by a space."""
    symbol = "\U0001F535 "
    return symbol * number
def red_circle(number=1):
    """Return *number* red-circle emoji, each followed by a space."""
    symbol = "\U0001F534 "
    return symbol * number
def yellow_circle(number=1):
    """Return *number* yellow-circle emoji, each followed by a space."""
    symbol = "\U0001F7E1 "
    return symbol * number
def graph(number=1):
    """Return *number* chart-increasing emoji, each followed by a space."""
    symbol = "\U0001F4c8 "
    return symbol * number
def bar_chart(number=1):
    """Return *number* bar-chart emoji, each followed by a space."""
    symbol = "\U0001F4ca "
    return symbol * number
def right_arrow(number=1):
    """Return *number* right-arrow emoji, each followed by a space."""
    symbol = "\U000027A1 "
    return symbol * number
def hour_glass(number=1):
    """Return *number* hourglass-flowing emoji, each followed by a space."""
    symbol = "\U000023F3 "
    return symbol * number
def hour_glass_done(number=1):
    """Return *number* hourglass-done emoji, each followed by a space."""
    symbol = "\U0000231B "
    return symbol * number
def cool(number=1):
    """Return *number* sunglasses-face emoji, each followed by a space."""
    symbol = "\U0001F60E "
    return symbol * number
def beer(number=1):
    """Return *number* beer-mug emoji, each followed by a space."""
    symbol = "\U0001F37A "
    return symbol * number
def coffee(number=1):
    """Return *number* hot-beverage emoji, each followed by a space."""
    symbol = "\U00002615 "
    return symbol * number
| 20.268293
| 34
| 0.652226
| 106
| 831
| 5.028302
| 0.386792
| 0.202627
| 0.292683
| 0.142589
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154545
| 0.205776
| 831
| 41
| 35
| 20.268293
| 0.65303
| 0.046931
| 0
| 0
| 0
| 0
| 0.180784
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
1bf3b227ca4bc9b4eb45f5541dcb2a8a98cc0c64
| 252
|
py
|
Python
|
pystratis/api/staking/requestmodels/__init__.py
|
TjadenFroyda/pyStratis
|
9cc7620d7506637f8a2b84003d931eceb36ac5f2
|
[
"MIT"
] | 8
|
2021-06-30T20:44:22.000Z
|
2021-12-07T14:42:22.000Z
|
pystratis/api/staking/requestmodels/__init__.py
|
TjadenFroyda/pyStratis
|
9cc7620d7506637f8a2b84003d931eceb36ac5f2
|
[
"MIT"
] | 2
|
2021-07-01T11:50:18.000Z
|
2022-01-25T18:39:49.000Z
|
pystratis/api/staking/requestmodels/__init__.py
|
TjadenFroyda/pyStratis
|
9cc7620d7506637f8a2b84003d931eceb36ac5f2
|
[
"MIT"
] | 4
|
2021-07-01T04:36:42.000Z
|
2021-09-17T10:54:19.000Z
|
from .startmultistakingrequest import StartMultiStakingRequest
from .startstakingrequest import StartStakingRequest
from .stopstakingrequest import StopStakingRequest
__all__ = ['StartStakingRequest', 'StopStakingRequest', 'StartMultiStakingRequest']
| 42
| 83
| 0.873016
| 16
| 252
| 13.5
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 252
| 5
| 84
| 50.4
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0.242063
| 0.095238
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
4007ea23f366bb159f645605c0459dea514711c6
| 35,480
|
py
|
Python
|
threedi_modelchecker/migrations/versions/0200_initial.py
|
nens/threedi-modelchecker
|
028d0c5847532526d3e1cb6ba3dd6a6d7494d648
|
[
"MIT"
] | null | null | null |
threedi_modelchecker/migrations/versions/0200_initial.py
|
nens/threedi-modelchecker
|
028d0c5847532526d3e1cb6ba3dd6a6d7494d648
|
[
"MIT"
] | 88
|
2019-08-08T12:23:15.000Z
|
2022-03-28T09:45:14.000Z
|
threedi_modelchecker/migrations/versions/0200_initial.py
|
nens/threedi-modelchecker
|
028d0c5847532526d3e1cb6ba3dd6a6d7494d648
|
[
"MIT"
] | null | null | null |
"""Create all tables if they do not exist already.
Revision ID: 0200
Revises:
Create Date: 2021-02-15 16:31:00.792077
"""
from alembic import op
from sqlalchemy.engine.reflection import Inspector
import geoalchemy2
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "0200"
down_revision = None
branch_labels = None
depends_on = None
existing_tables = []
def _get_existing_tables():
    """Refresh the module-level ``existing_tables`` list.

    Inspects the database behind the current Alembic connection and
    stores the names of every table that already exists.
    """
    global existing_tables
    bind = op.get_bind()
    existing_tables = Inspector.from_engine(bind).get_table_names()
def create_table_if_not_exists(table_name, *args, **kwargs):
    """Create *table_name* via ``op.create_table`` unless it already exists.

    Extra positional and keyword arguments are forwarded to
    ``op.create_table``. Returns its result for newly created tables,
    or ``None`` when *table_name* is in the module-level
    ``existing_tables`` list.
    """
    if table_name not in existing_tables:
        return op.create_table(table_name, *args, **kwargs)
    return None
def upgrade():
    """Create all v2_* tables that do not exist in the database yet.

    Every table goes through ``create_table_if_not_exists``, so this
    migration can also be stamped onto databases that already contain
    (some of) the tables. On SQLite, the spatial metadata is initialized
    first when the 'spatial_ref_sys' table is missing, because the
    geometry columns below are created with ``management=True``.
    """
    # Setup the global 'existing_tables'
    _get_existing_tables()
    # Initialize the Spatialite if necessary:
    conn = op.get_bind()
    if conn.dialect.name == "sqlite" and "spatial_ref_sys" not in existing_tables:
        op.execute("SELECT InitSpatialMetadata()")
    create_table_if_not_exists(
        "v2_2d_boundary_conditions",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("timeseries", sa.Text(), nullable=True),
        sa.Column("boundary_type", sa.Integer(), nullable=False),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="LINESTRING",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_2d_lateral",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("type", sa.Integer(), nullable=False),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="POINT",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.Column("timeseries", sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_calculation_point",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("content_type_id", sa.Integer(), nullable=True),
        sa.Column("user_ref", sa.String(length=80), nullable=False),
        sa.Column("calc_type", sa.Integer(), nullable=True),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="POINT",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_connection_nodes",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("storage_area", sa.Float(), nullable=True),
        sa.Column("initial_waterlevel", sa.Float(), nullable=True),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="POINT",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.Column(
            "the_geom_linestring",
            geoalchemy2.types.Geometry(
                geometry_type="LINESTRING",
                srid=4326,
                management=True,
            ),
            nullable=True,
        ),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_control_delta",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("measure_variable", sa.String(length=50), nullable=True),
        sa.Column("measure_delta", sa.String(length=50), nullable=True),
        sa.Column("measure_dt", sa.Float(), nullable=True),
        sa.Column("action_type", sa.String(length=50), nullable=True),
        sa.Column("action_value", sa.String(length=50), nullable=True),
        sa.Column("action_time", sa.Float(), nullable=True),
        sa.Column("target_type", sa.String(length=100), nullable=True),
        sa.Column("target_id", sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_control_group",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(length=100), nullable=True),
        sa.Column("description", sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_control_measure_group",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_control_memory",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("measure_variable", sa.String(length=50), nullable=True),
        sa.Column("upper_threshold", sa.Float(), nullable=True),
        sa.Column("lower_threshold", sa.Float(), nullable=True),
        sa.Column("action_type", sa.String(length=50), nullable=True),
        sa.Column("action_value", sa.String(length=50), nullable=True),
        sa.Column("target_type", sa.String(length=100), nullable=True),
        sa.Column("target_id", sa.Integer(), nullable=True),
        sa.Column("is_active", sa.Boolean(), nullable=False),
        sa.Column("is_inverse", sa.Boolean(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_control_pid",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("measure_variable", sa.String(length=50), nullable=True),
        sa.Column("setpoint", sa.Float(), nullable=True),
        sa.Column("kp", sa.Float(), nullable=True),
        sa.Column("ki", sa.Float(), nullable=True),
        sa.Column("kd", sa.Float(), nullable=True),
        sa.Column("action_type", sa.String(length=50), nullable=True),
        sa.Column("target_type", sa.String(length=100), nullable=True),
        sa.Column("target_upper_limit", sa.String(length=50), nullable=True),
        sa.Column("target_lower_limit", sa.String(length=50), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_control_table",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("action_table", sa.Text(), nullable=True),
        sa.Column("action_type", sa.String(length=50), nullable=True),
        sa.Column("measure_variable", sa.String(length=50), nullable=True),
        sa.Column("measure_operator", sa.String(length=2), nullable=True),
        sa.Column("target_type", sa.String(length=100), nullable=True),
        sa.Column("target_id", sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_control_timed",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("action_type", sa.String(length=50), nullable=True),
        sa.Column("action_table", sa.Text(), nullable=True),
        sa.Column("target_type", sa.String(length=100), nullable=True),
        sa.Column("target_id", sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_cross_section_definition",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("width", sa.String(length=255), nullable=True),
        sa.Column("height", sa.String(length=255), nullable=True),
        sa.Column("shape", sa.Integer(), nullable=True),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_dem_average_area",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="POLYGON",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_floodfill",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("waterlevel", sa.Float(), nullable=True),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="POINT",
                srid=4326,
                management=True,
            ),
            nullable=True,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_grid_refinement",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("refinement_level", sa.Integer(), nullable=False),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="LINESTRING",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_grid_refinement_area",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("refinement_level", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="POLYGON",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_groundwater",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("groundwater_impervious_layer_level", sa.Float(), nullable=True),
        sa.Column(
            "groundwater_impervious_layer_level_file",
            sa.String(length=255),
            nullable=True,
        ),
        sa.Column(
            "groundwater_impervious_layer_level_type", sa.Integer(), nullable=True
        ),
        sa.Column("phreatic_storage_capacity", sa.Float(), nullable=True),
        sa.Column(
            "phreatic_storage_capacity_file", sa.String(length=255), nullable=True
        ),
        sa.Column("phreatic_storage_capacity_type", sa.Integer(), nullable=True),
        sa.Column("equilibrium_infiltration_rate", sa.Float(), nullable=True),
        sa.Column(
            "equilibrium_infiltration_rate_file", sa.String(length=255), nullable=True
        ),
        sa.Column("equilibrium_infiltration_rate_type", sa.Integer(), nullable=True),
        sa.Column("initial_infiltration_rate", sa.Float(), nullable=True),
        sa.Column(
            "initial_infiltration_rate_file", sa.String(length=255), nullable=True
        ),
        sa.Column("initial_infiltration_rate_type", sa.Integer(), nullable=True),
        sa.Column("infiltration_decay_period", sa.Float(), nullable=True),
        sa.Column(
            "infiltration_decay_period_file", sa.String(length=255), nullable=True
        ),
        sa.Column("infiltration_decay_period_type", sa.Integer(), nullable=True),
        sa.Column("groundwater_hydro_connectivity", sa.Float(), nullable=True),
        sa.Column(
            "groundwater_hydro_connectivity_file", sa.String(length=255), nullable=True
        ),
        sa.Column("groundwater_hydro_connectivity_type", sa.Integer(), nullable=True),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("leakage", sa.Float(), nullable=True),
        sa.Column("leakage_file", sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_impervious_surface",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("surface_inclination", sa.String(length=64), nullable=False),
        sa.Column("surface_class", sa.String(length=128), nullable=False),
        sa.Column("surface_sub_class", sa.String(length=128), nullable=True),
        sa.Column("zoom_category", sa.Integer(), nullable=True),
        sa.Column("nr_of_inhabitants", sa.Float(), nullable=True),
        sa.Column("area", sa.Float(), nullable=True),
        sa.Column("dry_weather_flow", sa.Float(), nullable=True),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="POLYGON",
                srid=4326,
                management=True,
            ),
            nullable=True,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_interflow",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("interflow_type", sa.Integer(), nullable=False),
        sa.Column("porosity", sa.Float(), nullable=True),
        sa.Column("porosity_file", sa.String(length=255), nullable=True),
        sa.Column("porosity_layer_thickness", sa.Float(), nullable=True),
        sa.Column("impervious_layer_elevation", sa.Float(), nullable=True),
        sa.Column("hydraulic_conductivity", sa.Float(), nullable=True),
        sa.Column("hydraulic_conductivity_file", sa.String(length=255), nullable=True),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_levee",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("crest_level", sa.Float(), nullable=True),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="LINESTRING",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.Column("material", sa.Integer(), nullable=True),
        sa.Column("max_breach_depth", sa.Float(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_numerical_settings",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("cfl_strictness_factor_1d", sa.Float(), nullable=True),
        sa.Column("cfl_strictness_factor_2d", sa.Float(), nullable=True),
        sa.Column("convergence_cg", sa.Float(), nullable=True),
        sa.Column("convergence_eps", sa.Float(), nullable=True),
        sa.Column("flow_direction_threshold", sa.Float(), nullable=True),
        sa.Column("frict_shallow_water_correction", sa.Integer(), nullable=True),
        sa.Column("general_numerical_threshold", sa.Float(), nullable=True),
        sa.Column("integration_method", sa.Integer(), nullable=True),
        sa.Column("limiter_grad_1d", sa.Integer(), nullable=True),
        sa.Column("limiter_grad_2d", sa.Integer(), nullable=True),
        sa.Column("limiter_slope_crossectional_area_2d", sa.Integer(), nullable=True),
        sa.Column("limiter_slope_friction_2d", sa.Integer(), nullable=True),
        sa.Column("max_nonlin_iterations", sa.Integer(), nullable=True),
        sa.Column("max_degree", sa.Integer(), nullable=False),
        sa.Column("minimum_friction_velocity", sa.Float(), nullable=True),
        sa.Column("minimum_surface_area", sa.Float(), nullable=True),
        sa.Column("precon_cg", sa.Integer(), nullable=True),
        sa.Column("preissmann_slot", sa.Float(), nullable=True),
        sa.Column("pump_implicit_ratio", sa.Float(), nullable=True),
        sa.Column("thin_water_layer_definition", sa.Float(), nullable=True),
        sa.Column("use_of_cg", sa.Integer(), nullable=False),
        sa.Column("use_of_nested_newton", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_obstacle",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("crest_level", sa.Float(), nullable=False),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="LINESTRING",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_simple_infiltration",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("infiltration_rate", sa.Float(), nullable=True),
        sa.Column("infiltration_rate_file", sa.String(length=255), nullable=True),
        sa.Column("infiltration_surface_option", sa.Integer(), nullable=True),
        sa.Column("max_infiltration_capacity_file", sa.Text(), nullable=True),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_surface_parameters",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("outflow_delay", sa.Float(), nullable=False),
        sa.Column("surface_layer_thickness", sa.Float(), nullable=False),
        sa.Column("infiltration", sa.Boolean(), nullable=False),
        sa.Column("max_infiltration_capacity", sa.Float(), nullable=False),
        sa.Column("min_infiltration_capacity", sa.Float(), nullable=False),
        sa.Column("infiltration_decay_constant", sa.Float(), nullable=False),
        sa.Column("infiltration_recovery_constant", sa.Float(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_1d_boundary_conditions",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("boundary_type", sa.Integer(), nullable=False),
        sa.Column("timeseries", sa.Text(), nullable=True),
        sa.Column("connection_node_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("connection_node_id"),
    )
    create_table_if_not_exists(
        "v2_1d_lateral",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("connection_node_id", sa.Integer(), nullable=False),
        sa.Column("timeseries", sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_channel",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("calculation_type", sa.Integer(), nullable=False),
        sa.Column("dist_calc_points", sa.Float(), nullable=True),
        sa.Column("zoom_category", sa.Integer(), nullable=True),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="LINESTRING",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.Column("connection_node_start_id", sa.Integer(), nullable=False),
        sa.Column("connection_node_end_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_connected_pnt",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("calculation_pnt_id", sa.Integer(), nullable=False),
        sa.Column("levee_id", sa.Integer(), nullable=True),
        sa.Column("exchange_level", sa.Float(), nullable=True),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="POINT",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_control",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("control_group_id", sa.Integer(), nullable=True),
        sa.Column("measure_group_id", sa.Integer(), nullable=True),
        sa.Column("control_type", sa.String(length=15), nullable=True),
        sa.Column("control_id", sa.Integer(), nullable=True),
        sa.Column("start", sa.String(length=50), nullable=True),
        sa.Column("end", sa.String(length=50), nullable=True),
        sa.Column("measure_frequency", sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_control_measure_map",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("measure_group_id", sa.Integer(), nullable=True),
        sa.Column("object_type", sa.String(length=100), nullable=True),
        sa.Column("object_id", sa.Integer(), nullable=True),
        sa.Column("weight", sa.Float(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_culvert",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("calculation_type", sa.Integer(), nullable=True),
        sa.Column("friction_value", sa.Float(), nullable=False),
        sa.Column("friction_type", sa.Integer(), nullable=False),
        sa.Column("dist_calc_points", sa.Float(), nullable=True),
        sa.Column("zoom_category", sa.Integer(), nullable=True),
        sa.Column("discharge_coefficient_positive", sa.Float(), nullable=True),
        sa.Column("discharge_coefficient_negative", sa.Float(), nullable=True),
        sa.Column("invert_level_start_point", sa.Float(), nullable=False),
        sa.Column("invert_level_end_point", sa.Float(), nullable=False),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="LINESTRING",
                srid=4326,
                management=True,
            ),
            nullable=True,
        ),
        sa.Column("connection_node_start_id", sa.Integer(), nullable=False),
        sa.Column("connection_node_end_id", sa.Integer(), nullable=False),
        sa.Column("cross_section_definition_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_global_settings",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("use_2d_flow", sa.Boolean(), nullable=False),
        sa.Column("use_1d_flow", sa.Boolean(), nullable=False),
        sa.Column("manhole_storage_area", sa.Float(), nullable=True),
        sa.Column("name", sa.String(length=128), nullable=True),
        sa.Column("sim_time_step", sa.Float(), nullable=False),
        sa.Column("output_time_step", sa.Float(), nullable=True),
        sa.Column("nr_timesteps", sa.Integer(), nullable=False),
        sa.Column("start_time", sa.Text(), nullable=True),
        sa.Column("start_date", sa.Text(), nullable=False),
        sa.Column("grid_space", sa.Float(), nullable=False),
        sa.Column("dist_calc_points", sa.Float(), nullable=False),
        sa.Column("kmax", sa.Integer(), nullable=False),
        sa.Column("guess_dams", sa.Integer(), nullable=True),
        sa.Column("table_step_size", sa.Float(), nullable=False),
        sa.Column("flooding_threshold", sa.Float(), nullable=False),
        sa.Column("advection_1d", sa.Integer(), nullable=False),
        sa.Column("advection_2d", sa.Integer(), nullable=False),
        sa.Column("dem_file", sa.String(length=255), nullable=True),
        sa.Column("frict_type", sa.Integer(), nullable=True),
        sa.Column("frict_coef", sa.Float(), nullable=False),
        sa.Column("frict_coef_file", sa.String(length=255), nullable=True),
        sa.Column("water_level_ini_type", sa.Integer(), nullable=True),
        sa.Column("initial_waterlevel", sa.Float(), nullable=False),
        sa.Column("initial_waterlevel_file", sa.String(length=255), nullable=True),
        sa.Column("interception_global", sa.Float(), nullable=True),
        sa.Column("interception_file", sa.String(length=255), nullable=True),
        sa.Column("dem_obstacle_detection", sa.Boolean(), nullable=False),
        sa.Column("dem_obstacle_height", sa.Float(), nullable=True),
        sa.Column("embedded_cutoff_threshold", sa.Float(), nullable=True),
        sa.Column("epsg_code", sa.Integer(), nullable=True),
        sa.Column("timestep_plus", sa.Boolean(), nullable=False),
        sa.Column("max_angle_1d_advection", sa.Float(), nullable=True),
        sa.Column("minimum_sim_time_step", sa.Float(), nullable=True),
        sa.Column("maximum_sim_time_step", sa.Float(), nullable=True),
        sa.Column("frict_avg", sa.Integer(), nullable=True),
        sa.Column("wind_shielding_file", sa.String(length=255), nullable=True),
        sa.Column("use_0d_inflow", sa.Integer(), nullable=True),
        sa.Column("table_step_size_1d", sa.Float(), nullable=True),
        sa.Column("table_step_size_volume_2d", sa.Float(), nullable=True),
        sa.Column("use_2d_rain", sa.Integer(), nullable=False),
        sa.Column("initial_groundwater_level", sa.Float(), nullable=True),
        sa.Column(
            "initial_groundwater_level_file", sa.String(length=255), nullable=True
        ),
        sa.Column("initial_groundwater_level_type", sa.Integer(), nullable=True),
        sa.Column("numerical_settings_id", sa.Integer(), nullable=False),
        sa.Column("interflow_settings_id", sa.Integer(), nullable=True),
        sa.Column("control_group_id", sa.Integer(), nullable=True),
        sa.Column("simple_infiltration_settings_id", sa.Integer(), nullable=True),
        sa.Column("groundwater_settings_id", sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_impervious_surface_map",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("percentage", sa.Float(), nullable=False),
        sa.Column("impervious_surface_id", sa.Integer(), nullable=False),
        sa.Column("connection_node_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_manhole",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("zoom_category", sa.Integer(), nullable=True),
        sa.Column("shape", sa.String(length=4), nullable=True),
        sa.Column("width", sa.Float(), nullable=True),
        sa.Column("length", sa.Float(), nullable=True),
        sa.Column("surface_level", sa.Float(), nullable=True),
        sa.Column("bottom_level", sa.Float(), nullable=False),
        sa.Column("drain_level", sa.Float(), nullable=True),
        sa.Column("sediment_level", sa.Float(), nullable=True),
        sa.Column("manhole_indicator", sa.Integer(), nullable=True),
        sa.Column("calculation_type", sa.Integer(), nullable=True),
        sa.Column("connection_node_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_orifice",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("zoom_category", sa.Integer(), nullable=True),
        sa.Column("crest_type", sa.Integer(), nullable=False),
        sa.Column("crest_level", sa.Float(), nullable=False),
        sa.Column("friction_value", sa.Float(), nullable=True),
        sa.Column("friction_type", sa.Integer(), nullable=True),
        sa.Column("discharge_coefficient_positive", sa.Float(), nullable=True),
        sa.Column("discharge_coefficient_negative", sa.Float(), nullable=True),
        sa.Column("sewerage", sa.Boolean(), nullable=False),
        sa.Column("connection_node_start_id", sa.Integer(), nullable=False),
        sa.Column("connection_node_end_id", sa.Integer(), nullable=False),
        sa.Column("cross_section_definition_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_pipe",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("profile_num", sa.Integer(), nullable=True),
        sa.Column("sewerage_type", sa.Integer(), nullable=True),
        sa.Column("calculation_type", sa.Integer(), nullable=False),
        sa.Column("invert_level_start_point", sa.Float(), nullable=False),
        sa.Column("invert_level_end_point", sa.Float(), nullable=False),
        sa.Column("friction_value", sa.Float(), nullable=False),
        sa.Column("friction_type", sa.Integer(), nullable=False),
        sa.Column("dist_calc_points", sa.Float(), nullable=True),
        sa.Column("material", sa.Integer(), nullable=True),
        sa.Column("original_length", sa.Float(), nullable=True),
        sa.Column("zoom_category", sa.Integer(), nullable=True),
        sa.Column("connection_node_start_id", sa.Integer(), nullable=False),
        sa.Column("connection_node_end_id", sa.Integer(), nullable=False),
        sa.Column("cross_section_definition_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_pumpstation",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("zoom_category", sa.Integer(), nullable=True),
        sa.Column("classification", sa.Integer(), nullable=True),
        sa.Column("sewerage", sa.Boolean(), nullable=True),
        sa.Column("type", sa.Integer(), nullable=False),
        sa.Column("start_level", sa.Float(), nullable=False),
        sa.Column("lower_stop_level", sa.Float(), nullable=False),
        sa.Column("upper_stop_level", sa.Float(), nullable=True),
        sa.Column("capacity", sa.Float(), nullable=False),
        sa.Column("connection_node_start_id", sa.Integer(), nullable=False),
        sa.Column("connection_node_end_id", sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_surface",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("zoom_category", sa.Integer(), nullable=True),
        sa.Column("nr_of_inhabitants", sa.Float(), nullable=True),
        sa.Column("dry_weather_flow", sa.Float(), nullable=True),
        sa.Column("function", sa.String(length=64), nullable=True),
        sa.Column("area", sa.Float(), nullable=True),
        sa.Column("surface_parameters_id", sa.Integer(), nullable=False),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="POLYGON",
                srid=4326,
                management=True,
            ),
            nullable=True,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_surface_map",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("surface_type", sa.String(length=40), nullable=False),
        sa.Column("surface_id", sa.Integer(), nullable=False),
        sa.Column("connection_node_id", sa.Integer(), nullable=False),
        sa.Column("percentage", sa.Float(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_weir",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("display_name", sa.String(length=255), nullable=True),
        sa.Column("crest_level", sa.Float(), nullable=False),
        sa.Column("crest_type", sa.Integer(), nullable=False),
        sa.Column("friction_value", sa.Float(), nullable=True),
        sa.Column("friction_type", sa.Integer(), nullable=True),
        sa.Column("discharge_coefficient_positive", sa.Float(), nullable=True),
        sa.Column("discharge_coefficient_negative", sa.Float(), nullable=True),
        sa.Column("sewerage", sa.Boolean(), nullable=True),
        sa.Column("external", sa.Boolean(), nullable=True),
        sa.Column("zoom_category", sa.Integer(), nullable=True),
        sa.Column("connection_node_start_id", sa.Integer(), nullable=False),
        sa.Column("connection_node_end_id", sa.Integer(), nullable=False),
        sa.Column("cross_section_definition_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_aggregation_settings",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("global_settings_id", sa.Integer(), nullable=True),
        sa.Column("var_name", sa.String(length=100), nullable=False),
        sa.Column("flow_variable", sa.String(length=100), nullable=False),
        sa.Column("aggregation_method", sa.String(length=100), nullable=True),
        sa.Column("aggregation_in_space", sa.Boolean(), nullable=False),
        sa.Column("timestep", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_cross_section_location",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("code", sa.String(length=100), nullable=True),
        sa.Column("reference_level", sa.Float(), nullable=False),
        sa.Column("friction_type", sa.Integer(), nullable=False),
        sa.Column("friction_value", sa.Float(), nullable=False),
        sa.Column("bank_level", sa.Float(), nullable=True),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="POINT",
                srid=4326,
                management=True,
            ),
            nullable=False,
        ),
        sa.Column("channel_id", sa.Integer(), nullable=False),
        sa.Column("definition_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    create_table_if_not_exists(
        "v2_windshielding",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("north", sa.Float(), nullable=True),
        sa.Column("northeast", sa.Float(), nullable=True),
        sa.Column("east", sa.Float(), nullable=True),
        sa.Column("southeast", sa.Float(), nullable=True),
        sa.Column("south", sa.Float(), nullable=True),
        sa.Column("southwest", sa.Float(), nullable=True),
        sa.Column("west", sa.Float(), nullable=True),
        sa.Column("northwest", sa.Float(), nullable=True),
        sa.Column(
            "the_geom",
            geoalchemy2.types.Geometry(
                geometry_type="POINT",
                srid=4326,
                management=True,
            ),
            nullable=True,
        ),
        sa.Column("channel_id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
def downgrade():
    """Deliberately a no-op: this initial revision does not drop any tables."""
    pass
| 45.604113
| 87
| 0.619927
| 4,156
| 35,480
| 5.107074
| 0.078681
| 0.14775
| 0.157644
| 0.20636
| 0.894747
| 0.873451
| 0.814794
| 0.721885
| 0.664594
| 0.589022
| 0
| 0.014916
| 0.223365
| 35,480
| 777
| 88
| 45.662806
| 0.755362
| 0.009132
| 0
| 0.596282
| 0
| 0
| 0.179189
| 0.071067
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005312
| false
| 0.001328
| 0.005312
| 0
| 0.01328
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
401c016e7ede8a897a8e67db5dc28f4ca2139a1e
| 9,296
|
py
|
Python
|
gpsig/signature_algs.py
|
vishalbelsare/GPSig
|
d250295faeb84d82af097e0dbbe441614888fd69
|
[
"Apache-2.0"
] | 25
|
2019-06-20T10:38:42.000Z
|
2022-03-19T03:56:44.000Z
|
gpsig/signature_algs.py
|
vishalbelsare/GPSig
|
d250295faeb84d82af097e0dbbe441614888fd69
|
[
"Apache-2.0"
] | 5
|
2019-12-16T21:55:14.000Z
|
2022-02-10T00:26:07.000Z
|
gpsig/signature_algs.py
|
vishalbelsare/GPSig
|
d250295faeb84d82af097e0dbbe441614888fd69
|
[
"Apache-2.0"
] | 5
|
2020-08-20T12:00:21.000Z
|
2022-02-28T12:09:03.000Z
|
import numpy as np
import tensorflow as tf
from gpflow import settings
from .low_rank_calculations import lr_hadamard_prod_rand
def signature_kern_first_order(M, num_levels, difference = True):
    """Compute the first-order signature kernel matrix.

    # Input
    :M: base kernel entries, shaped either
        (num_examples1, len_examples1, num_examples2, len_examples2) or
        (num_examples, len_examples, len_examples)
    :num_levels: number of signature levels to compute
    :difference: when True, take second-order differences of M along
        both length axes before accumulating
    # Output
    :K: levels stacked along axis 0 -- (num_levels+1, num_examples1, num_examples2)
        or (num_levels+1, num_examples) tensor
    """
    # Level 0 of the signature kernel is identically 1.
    if M.shape.ndims == 4:
        n1, _, n2, _ = tf.unstack(tf.shape(M)[-4:])
        levels = [tf.ones((n1, n2), dtype=settings.float_type)]
    else:
        levels = [tf.ones((tf.shape(M)[0]), dtype=settings.float_type)]
    if difference:
        # Second-order finite difference along both length axes.
        M = M[:, 1:, ..., 1:] + M[:, :-1, ..., :-1] - M[:, :-1, ..., 1:] - M[:, 1:, ..., :-1]
    # Level 1 is a plain sum over both length axes.
    levels.append(tf.reduce_sum(M, axis=(1, -1)))
    running = M
    for _ in range(2, num_levels + 1):
        # Exclusive cumulative sums implement the iterated-integral recursion.
        shifted = tf.cumsum(tf.cumsum(running, exclusive=True, axis=1), exclusive=True, axis=-1)
        running = M * shifted
        levels.append(tf.reduce_sum(running, axis=(1, -1)))
    return tf.stack(levels, axis=0)
def signature_kern_higher_order(M, num_levels, order=2, difference = True):
    """
    Compute the higher-order signature kernel matrix
    # Input
    :M: (num_examples1, len_examples1, num_examples2, len_examples2) or (num_examples, len_examples, len_examples) tensors
    :num_levels: number of signature levels to compute
    :order: order of approximation to use in signature kernel
    # Output
    :K: (num_examples1, num_examples2) or (num_examples) tensor
    """
    # Level 0 of the signature kernel is identically 1.
    if M.shape.ndims == 4:
        num_examples1, num_examples2 = tf.shape(M)[0], tf.shape(M)[2]
        K = [tf.ones((num_examples1, num_examples2), dtype=settings.float_type)]
    else:
        num_examples = tf.shape(M)[0]
        K = [tf.ones((num_examples), dtype=settings.float_type)]
    if difference:
        # Second-order finite difference along both length axes.
        M = M[:, 1:, ..., 1:] + M[:, :-1, ..., :-1] - M[:, :-1, ..., 1:] - M[:, 1:, ..., :-1]
    # Level 1 is a plain sum over both length axes.
    K.append(tf.reduce_sum(M, axis=(1, -1)))
    # R is a (d, d) numpy *object* array of tf.Tensors; entry [j, k] carries the
    # recursion state for repetition counts (j+1, k+1) along the two length axes.
    R = np.asarray([[M]])
    for i in range(2, num_levels+1):
        # The approximation order caps the size of the recursion state.
        d = min(i, order)
        R_next = np.empty((d, d), dtype=tf.Tensor)
        # Base entry: exclusive double cumsum of the sum of all previous states.
        R_next[0, 0] = M * tf.cumsum(tf.cumsum(tf.add_n(R.flatten().tolist()), exclusive=True, axis=1), exclusive=True, axis=-1)
        for j in range(2, d+1):
            # Border entries: cumsum along one axis only, scaled by 1/j
            # (repeated-index correction in the higher-order expansion).
            R_next[0, j-1] = 1 / tf.cast(j, settings.float_type) * M * tf.cumsum(tf.add_n(R[:, j-2].tolist()), exclusive=True, axis=1)
            R_next[j-1, 0] = 1 / tf.cast(j, settings.float_type) * M * tf.cumsum(tf.add_n(R[j-2, :].tolist()), exclusive=True, axis=-1)
            for k in range(2, d+1):
                # Interior entries: no cumsum, scaled by 1/(j*k).
                R_next[j-1, k-1] = 1 / (tf.cast(j, settings.float_type) * tf.cast(k, settings.float_type)) * M * R[j-2, k-2]
        # Level i sums every state entry over both length axes.
        K.append(tf.reduce_sum(tf.add_n(R_next.flatten().tolist()), axis=(1, -1)))
        R = R_next
    return tf.stack(K, axis=0)
def tensor_kern(M, num_levels):
    """
    Computing the square matrix of inner product of inducing tensors
    # Input
    :M: (num_levels*(num_levels+1)/2, num_tensors, num_tensors) tensor entries vs tensor entries kernel matrices
    :num_levels: degree of truncation for the signatures
    # Output
    :K: (num_tensors, num_tensors) kernel matrix tensors
    """
    num_tensors, num_tensors2 = tf.shape(M)[1], tf.shape(M)[2]
    # Level 0 of the signature inner product is identically 1.
    K = [tf.ones((num_tensors, num_tensors2), dtype=settings.float_type)]
    offset = 0
    for level in range(1, num_levels + 1):
        # Slices offset .. offset+level-1 of M belong to this signature level;
        # their elementwise product gives the level-`level` inner products.
        prod = M[offset]
        for idx in range(offset + 1, offset + level):
            prod = prod * M[idx]
        offset += level
        K.append(prod)
    return tf.stack(K, axis=0)
def signature_kern_tens_vs_seq_first_order(M, num_levels, difference=True):
    """
    Compute tensor vs (first-order) signature inner products
    # Input
    :M: (num_levels*(num_levels+1)/2, num_tensors, num_examples, len_examples)
    :num_levels: degree of truncation for the signatures
    :difference: if True, difference M along the time-series axis first
    # Output
    :K: (num_tensors, num_examples) inner product matrix
    """
    num_tensors, num_examples, len_examples = tf.unstack(tf.shape(M)[1:])
    if difference:
        # Difference along time series axis.
        M = M[..., 1:] - M[..., :-1]
    # Level 0 is identically 1.
    K = [tf.ones((num_tensors, num_examples), dtype=settings.float_type)]
    offset = 0
    for level in range(1, num_levels + 1):
        # Slices offset .. offset+level-1 hold this level's component kernels;
        # chain them through exclusive cumsums along the time axis.
        running = M[offset]
        for idx in range(offset + 1, offset + level):
            running = M[idx] * tf.cumsum(running, exclusive=True, axis=2)
        offset += level
        K.append(tf.reduce_sum(running, axis=2))
    return tf.stack(K, axis=0)
def signature_kern_tens_vs_seq_higher_order(M, num_levels, order=2, difference = True):
    """
    Compute tensor vs (higher-order) signature inner products.
    # Input
    :M: (num_levels*(num_levels+1)/2, num_tensors, num_examples, len_examples)
    :num_levels: degree of truncation for the signatures
    :order: order of approximation to use in the signature kernel
    :difference: if True, difference M along the time-series axis first
    # Output
    :K: (num_levels+1, num_tensors, num_examples) inner product matrix (level 0 is 1)
    """
    num_tensors, num_examples, len_examples = tf.unstack(tf.shape(M)[1:])
    if difference:
        M = M[..., 1:] - M[..., :-1] # difference along time series axis
    # Level 0 of the signature inner product is identically 1.
    K = [tf.ones((num_tensors, num_examples), dtype=settings.float_type)]
    # k walks through the flattened per-level slices of M: level i consumes
    # slices k .. k+i-1.
    k = 0
    for i in range(1, num_levels+1):
        # R is a numpy object-array of tf tensors; R[l] holds the part of the
        # recursion where the current increment is repeated l+1 times
        # (the "order"-truncated higher-order correction).
        R = np.asarray([M[k]])
        k += 1
        for j in range(1, i):
            # Truncate the correction order by the current depth.
            d = min(j+1, order)
            R_next = np.empty((d), dtype=tf.Tensor)
            # Fresh-increment term: exclusive cumsum over all accumulated parts.
            R_next[0] = M[k] * tf.cumsum(tf.add_n(R.tolist()), exclusive=True, axis=2)
            for l in range(1, d):
                # Repeated-increment term; 1/(l+1) is the multiplicity factor.
                R_next[l] = 1. / tf.cast(l+1, settings.float_type) * M[k] * R[l-1]
            R = R_next
            k += 1
        # Level-i value: sum the parts, then reduce over the time axis.
        K.append(tf.reduce_sum(tf.add_n(R.tolist()), axis=2))
    return tf.stack(K, axis=0)
def signature_kern_first_order_lr_feature(U, num_levels, rank_bound, sparsity='sqrt', seeds=None, difference=True):
    """
    Compute feature map for (first-order) low-rank signatures from low-rank factor of big kernel matrix
    # Input
    :U: (num_examples, len_examples, num_components) low-rank feature representations for embedded sequences
    :num_levels: degree of truncation for the signatures
    :rank_bound: number of components used in the low-rank approximation
    :sparsity: controls the sparsity of the randomized projection used for simplifying the low-rank factor at every iteration
               possible values are: 'sqrt' - most accurate, but costly; 'log' - less accurate, but cheaper; 'lin' - sparsest, least accurate
    :seeds: optional - (num_levels-1, 2) random seeds for random projections
    :difference: if True, difference U along the time axis first
    # Output
    :Phi: (num_levels+1,) list of low-rank factors, one per signature level
    """
    num_examples, len_examples, num_components = tf.unstack(tf.shape(U))
    # Level 0 of the signature is identically 1.
    Phi = [tf.ones((num_examples, 1), dtype=settings.float_type)]
    if difference:
        U = U[:, 1:, :] - U[:, :-1, :]
    # Level 1: plain sum of increments over the time axis.
    Phi.append(tf.reduce_sum(U, axis=1))
    P = U
    for i in range(2, num_levels + 1):
        # Exclusive cumsum realizes the "strictly earlier in time" constraint
        # of the signature recursion before taking the Hadamard product.
        P = tf.cumsum(P, axis=1, exclusive=True)
        if seeds is None:
            P = lr_hadamard_prod_rand(U, P, rank_bound, sparsity)
        else:
            P = lr_hadamard_prod_rand(U, P, rank_bound, sparsity, seeds[i - 2])
        # BUG FIX: this previously appended tf.reduce_sum(U, axis=1), which made
        # every level >= 2 a copy of level 1 and discarded the recursion in P.
        # The level-i factor is the time-sum of the updated low-rank factor P.
        Phi.append(tf.reduce_sum(P, axis=1))
    return Phi
def tensor_kern_lr_feature(U, num_levels, rank_bound, sparsity='sqrt', seeds=None):
    """
    Compute the low-rank feature map for tensors
    # Input
    :U: (num_levels*(num_levels+1)/2, num_tensors, num_components) low-rank feature representations for inducing tensors
    :num_levels: degree of truncation for the signatures
    :rank_bound: number of components used in the low-rank approximation
    :sparsity: controls the sparsity of the randomized projection used for simplifying the low-rank factor at every iteration
               possible values are: 'sqrt' - most accurate, but costly; 'log' - less accurate, but cheaper; 'lin' - sparsest, least accurate
    :seeds: optional - (num_levels-1, 2) random seeds for random projections
    # Output
    :Phi: (num_levels+1,) list of low-rank factors for tensors
    """
    num_tensors = tf.shape(U)[1]
    # Level 0 of the signature feature map is identically 1.
    Phi = [tf.ones((num_tensors, 1), dtype=settings.float_type)]
    # idx walks the flattened per-level slices of U: level i consumes i slices.
    idx = 0
    for level in range(1, num_levels + 1):
        factor = U[idx]
        idx += 1
        for j in range(1, level):
            # Randomized low-rank Hadamard product chains this level's factors.
            if seeds is None:
                factor = lr_hadamard_prod_rand(U[idx], factor, rank_bound, sparsity)
            else:
                factor = lr_hadamard_prod_rand(U[idx], factor, rank_bound, sparsity, seeds[j - 1])
            idx += 1
        Phi.append(factor)
    return Phi
| 41.873874
| 150
| 0.577775
| 1,325
| 9,296
| 3.904151
| 0.109434
| 0.057413
| 0.028997
| 0.038276
| 0.817514
| 0.788131
| 0.749275
| 0.709066
| 0.639088
| 0.604292
| 0
| 0.024349
| 0.293137
| 9,296
| 222
| 151
| 41.873874
| 0.762898
| 0.357681
| 0
| 0.538462
| 0
| 0
| 0.001463
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059829
| false
| 0
| 0.034188
| 0
| 0.153846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
402a1d42f67e3565c34e835b7d4af392246fa9e5
| 278
|
py
|
Python
|
code/view/baseview.py
|
nicolgit/ZeroClock
|
b0590bf591c7e1c6968fd7b5445c5f21d00c6c4a
|
[
"MIT"
] | 2
|
2021-11-18T18:07:20.000Z
|
2021-11-18T18:10:25.000Z
|
code/view/baseview.py
|
nicolgit/zero-clock
|
b0590bf591c7e1c6968fd7b5445c5f21d00c6c4a
|
[
"MIT"
] | null | null | null |
code/view/baseview.py
|
nicolgit/zero-clock
|
b0590bf591c7e1c6968fd7b5445c5f21d00c6c4a
|
[
"MIT"
] | null | null | null |
class BaseView:
    """Abstract view interface; every method must be overridden by subclasses."""

    def prepare_image(self):
        """Subclasses implement this; the base class only declares the contract."""
        raise NotImplementedError

    def show_image(self):
        """Subclasses implement this; the base class only declares the contract."""
        raise NotImplementedError

    def show_centered_string(self, text, font, y=None):
        """Subclasses implement this; the base class only declares the contract."""
        raise NotImplementedError

    def show_welcome(self):
        """Subclasses implement this; the base class only declares the contract."""
        raise NotImplementedError
| 19.857143
| 88
| 0.73741
| 31
| 278
| 6.451613
| 0.516129
| 0.48
| 0.42
| 0.465
| 0.4
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0.201439
| 278
| 13
| 89
| 21.384615
| 0.900901
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.8
| false
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.