body_hash
stringlengths
64
64
body
stringlengths
23
109k
docstring
stringlengths
1
57k
path
stringlengths
4
198
name
stringlengths
1
115
repository_name
stringlengths
7
111
repository_stars
float64
0
191k
lang
stringclasses
1 value
body_without_docstring
stringlengths
14
108k
unified
stringlengths
45
133k
c6d27716acbb01408df34c92b94ff96eb05e5b3d4676fb8ea6d39424baa4fa42
def load_ckpt(network, pretrain_ckpt_path, trainable=True): '\n incremental_learning or not\n ' param_dict = load_checkpoint(pretrain_ckpt_path) load_param_into_net(network, param_dict) if (not trainable): for param in network.get_parameters(): param.requires_grad = False
incremental_learning or not
model_zoo/research/nlp/dscnn/src/models.py
load_ckpt
limberc/mindspore
77
python
def load_ckpt(network, pretrain_ckpt_path, trainable=True): '\n \n ' param_dict = load_checkpoint(pretrain_ckpt_path) load_param_into_net(network, param_dict) if (not trainable): for param in network.get_parameters(): param.requires_grad = False
def load_ckpt(network, pretrain_ckpt_path, trainable=True): '\n \n ' param_dict = load_checkpoint(pretrain_ckpt_path) load_param_into_net(network, param_dict) if (not trainable): for param in network.get_parameters(): param.requires_grad = False<|docstring|>incremental_learning or not<|endoftext|>
9051c5ad184a2662774907ce7a13e35aa5a1729bb038ad87f3b53c2a459c49de
@classmethod def __getImgServiceByConfig(cls) -> ImgService: 'ๆ นๆฎๅฝ“ๅ‰็ณป็ปŸ้…็ฝฎ่ฟ”ๅ›žๅˆ้€‚็š„ๅ›พ็‰‡ๆœๅŠก' sysConfig = Config.getInstance() imgService = sysConfig.getConfigParam(Config.PARAM_IMG_SERVICE) return cls.getImgServiceByFlag(imgService)
ๆ นๆฎๅฝ“ๅ‰็ณป็ปŸ้…็ฝฎ่ฟ”ๅ›žๅˆ้€‚็š„ๅ›พ็‰‡ๆœๅŠก
src/markdown_img/img_service_manager.py
__getImgServiceByConfig
icexmoon/markdown-img
24
python
@classmethod def __getImgServiceByConfig(cls) -> ImgService: sysConfig = Config.getInstance() imgService = sysConfig.getConfigParam(Config.PARAM_IMG_SERVICE) return cls.getImgServiceByFlag(imgService)
@classmethod def __getImgServiceByConfig(cls) -> ImgService: sysConfig = Config.getInstance() imgService = sysConfig.getConfigParam(Config.PARAM_IMG_SERVICE) return cls.getImgServiceByFlag(imgService)<|docstring|>ๆ นๆฎๅฝ“ๅ‰็ณป็ปŸ้…็ฝฎ่ฟ”ๅ›žๅˆ้€‚็š„ๅ›พ็‰‡ๆœๅŠก<|endoftext|>
ff407919ce46b9fd31ace584429c12b80810def1e77ca5708d0619cdd22c8582
@classmethod def getImgServiceByFlag(cls, flag: str) -> ImgService: 'ๆ นๆฎๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†่Žทๅ–็›ธๅบ”็š„ๅ›พ็‰‡ๆœๅŠก\n flag: ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†\n ' webImage: ImgService imgService = flag if (imgService == Config.IMG_SERVICE_ALI): webImage = YujianImgService(YujianImgService.API_TYPE_ALI) elif (imgService == Config.IMG_SERVICE_ALI2): webImage = YujianImgService(YujianImgService.API_TYPE_ALI) elif (imgService == Config.IMG_SERVICE_RRUU): webImage = NoneImgService() elif (imgService == Config.IMG_SERVICE_VIMCN): webImage = VimcnImgService() elif (imgService == Config.IMG_SERVICE_YUJIAN): webImage = YujianImgService() elif (imgService == Config.IMG_SERVICE_QCLOUD): webImage = QcloudImgService() elif (imgService == Config.IMG_SERVICE_QINIU): webImage = QiniuImgService() elif (imgService == Config.IMG_SERVICE_BILIBILI): webImage = YujianImgService(YujianImgService.API_TYPE_BILIBILI) elif (imgService == Config.IMG_SERVICE_360): webImage = YujianImgService(YujianImgService.API_TYPE_QIHOO) elif (imgService == Config.IMG_SERVICE_AI58): webImage = YujianImgService(YujianImgService.API_TYPE_AI58) elif (imgService == Config.IMG_SERVICE_SOUGOU): webImage = YujianImgService(YujianImgService.API_TYPE_SOUGOU) elif (imgService == Config.IMG_SERVICE_HULUXIA): webImage = YujianImgService(YujianImgService.API_TYPE_HULUXIA) elif (imgService == Config.IMG_SERVICE_CATBOX): webImage = YujianImgService(YujianImgService.API_TYPE_CATBOX) elif (imgService == Config.IMG_SERVICE_POSTIMAGES): webImage = YujianImgService(YujianImgService.API_TYPE_POSTIMAGES) elif (imgService == Config.IMG_SERVICE_GTIMG): webImage = YujianImgService(YujianImgService.API_TYPE_GTIMG) elif (imgService == Config.IMG_SERVICE_BKIMG): webImage = YujianImgService(YujianImgService.API_TYPE_BKIMG) elif (imgService == Config.IMG_SERVICE_MUKE): webImage = YujianImgService(YujianImgService.API_TYPE_MUKE) elif (imgService == Config.IMG_SERVICE_UPYUN): webImage = UpyunImgService() else: webImage = SmmsImgService() return 
webImage
ๆ นๆฎๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†่Žทๅ–็›ธๅบ”็š„ๅ›พ็‰‡ๆœๅŠก flag: ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†
src/markdown_img/img_service_manager.py
getImgServiceByFlag
icexmoon/markdown-img
24
python
@classmethod def getImgServiceByFlag(cls, flag: str) -> ImgService: 'ๆ นๆฎๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†่Žทๅ–็›ธๅบ”็š„ๅ›พ็‰‡ๆœๅŠก\n flag: ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†\n ' webImage: ImgService imgService = flag if (imgService == Config.IMG_SERVICE_ALI): webImage = YujianImgService(YujianImgService.API_TYPE_ALI) elif (imgService == Config.IMG_SERVICE_ALI2): webImage = YujianImgService(YujianImgService.API_TYPE_ALI) elif (imgService == Config.IMG_SERVICE_RRUU): webImage = NoneImgService() elif (imgService == Config.IMG_SERVICE_VIMCN): webImage = VimcnImgService() elif (imgService == Config.IMG_SERVICE_YUJIAN): webImage = YujianImgService() elif (imgService == Config.IMG_SERVICE_QCLOUD): webImage = QcloudImgService() elif (imgService == Config.IMG_SERVICE_QINIU): webImage = QiniuImgService() elif (imgService == Config.IMG_SERVICE_BILIBILI): webImage = YujianImgService(YujianImgService.API_TYPE_BILIBILI) elif (imgService == Config.IMG_SERVICE_360): webImage = YujianImgService(YujianImgService.API_TYPE_QIHOO) elif (imgService == Config.IMG_SERVICE_AI58): webImage = YujianImgService(YujianImgService.API_TYPE_AI58) elif (imgService == Config.IMG_SERVICE_SOUGOU): webImage = YujianImgService(YujianImgService.API_TYPE_SOUGOU) elif (imgService == Config.IMG_SERVICE_HULUXIA): webImage = YujianImgService(YujianImgService.API_TYPE_HULUXIA) elif (imgService == Config.IMG_SERVICE_CATBOX): webImage = YujianImgService(YujianImgService.API_TYPE_CATBOX) elif (imgService == Config.IMG_SERVICE_POSTIMAGES): webImage = YujianImgService(YujianImgService.API_TYPE_POSTIMAGES) elif (imgService == Config.IMG_SERVICE_GTIMG): webImage = YujianImgService(YujianImgService.API_TYPE_GTIMG) elif (imgService == Config.IMG_SERVICE_BKIMG): webImage = YujianImgService(YujianImgService.API_TYPE_BKIMG) elif (imgService == Config.IMG_SERVICE_MUKE): webImage = YujianImgService(YujianImgService.API_TYPE_MUKE) elif (imgService == Config.IMG_SERVICE_UPYUN): webImage = UpyunImgService() else: webImage = SmmsImgService() return 
webImage
@classmethod def getImgServiceByFlag(cls, flag: str) -> ImgService: 'ๆ นๆฎๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†่Žทๅ–็›ธๅบ”็š„ๅ›พ็‰‡ๆœๅŠก\n flag: ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†\n ' webImage: ImgService imgService = flag if (imgService == Config.IMG_SERVICE_ALI): webImage = YujianImgService(YujianImgService.API_TYPE_ALI) elif (imgService == Config.IMG_SERVICE_ALI2): webImage = YujianImgService(YujianImgService.API_TYPE_ALI) elif (imgService == Config.IMG_SERVICE_RRUU): webImage = NoneImgService() elif (imgService == Config.IMG_SERVICE_VIMCN): webImage = VimcnImgService() elif (imgService == Config.IMG_SERVICE_YUJIAN): webImage = YujianImgService() elif (imgService == Config.IMG_SERVICE_QCLOUD): webImage = QcloudImgService() elif (imgService == Config.IMG_SERVICE_QINIU): webImage = QiniuImgService() elif (imgService == Config.IMG_SERVICE_BILIBILI): webImage = YujianImgService(YujianImgService.API_TYPE_BILIBILI) elif (imgService == Config.IMG_SERVICE_360): webImage = YujianImgService(YujianImgService.API_TYPE_QIHOO) elif (imgService == Config.IMG_SERVICE_AI58): webImage = YujianImgService(YujianImgService.API_TYPE_AI58) elif (imgService == Config.IMG_SERVICE_SOUGOU): webImage = YujianImgService(YujianImgService.API_TYPE_SOUGOU) elif (imgService == Config.IMG_SERVICE_HULUXIA): webImage = YujianImgService(YujianImgService.API_TYPE_HULUXIA) elif (imgService == Config.IMG_SERVICE_CATBOX): webImage = YujianImgService(YujianImgService.API_TYPE_CATBOX) elif (imgService == Config.IMG_SERVICE_POSTIMAGES): webImage = YujianImgService(YujianImgService.API_TYPE_POSTIMAGES) elif (imgService == Config.IMG_SERVICE_GTIMG): webImage = YujianImgService(YujianImgService.API_TYPE_GTIMG) elif (imgService == Config.IMG_SERVICE_BKIMG): webImage = YujianImgService(YujianImgService.API_TYPE_BKIMG) elif (imgService == Config.IMG_SERVICE_MUKE): webImage = YujianImgService(YujianImgService.API_TYPE_MUKE) elif (imgService == Config.IMG_SERVICE_UPYUN): webImage = UpyunImgService() else: webImage = SmmsImgService() return 
webImage<|docstring|>ๆ นๆฎๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†่Žทๅ–็›ธๅบ”็š„ๅ›พ็‰‡ๆœๅŠก flag: ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†<|endoftext|>
3d0684f0cf4e8f60f28abad76f8d423d33e99da00d9f3c87e14eca7d67ccfc1c
@classmethod def isValidImgServiceFlag(cls, flag: str) -> bool: 'ๆ˜ฏๅฆไธบๅˆๆณ•็š„ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†\n flag: ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†\n ' supportedService = {Config.IMG_SERVICE_SMMS, Config.IMG_SERVICE_ALI, Config.IMG_SERVICE_RRUU, Config.IMG_SERVICE_VIMCN, Config.IMG_SERVICE_YUJIAN, Config.IMG_SERVICE_ALI2, Config.IMG_SERVICE_QCLOUD, Config.IMG_SERVICE_QINIU, Config.IMG_SERVICE_BILIBILI, Config.IMG_SERVICE_SOUGOU, Config.IMG_SERVICE_HULUXIA, Config.IMG_SERVICE_CATBOX, Config.IMG_SERVICE_360, Config.IMG_SERVICE_POSTIMAGES, Config.IMG_SERVICE_AI58, Config.IMG_SERVICE_GTIMG, Config.IMG_SERVICE_BKIMG, Config.IMG_SERVICE_MUKE, Config.IMG_SERVICE_UPYUN} if (flag in supportedService): return True return False
ๆ˜ฏๅฆไธบๅˆๆณ•็š„ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ† flag: ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†
src/markdown_img/img_service_manager.py
isValidImgServiceFlag
icexmoon/markdown-img
24
python
@classmethod def isValidImgServiceFlag(cls, flag: str) -> bool: 'ๆ˜ฏๅฆไธบๅˆๆณ•็š„ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†\n flag: ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†\n ' supportedService = {Config.IMG_SERVICE_SMMS, Config.IMG_SERVICE_ALI, Config.IMG_SERVICE_RRUU, Config.IMG_SERVICE_VIMCN, Config.IMG_SERVICE_YUJIAN, Config.IMG_SERVICE_ALI2, Config.IMG_SERVICE_QCLOUD, Config.IMG_SERVICE_QINIU, Config.IMG_SERVICE_BILIBILI, Config.IMG_SERVICE_SOUGOU, Config.IMG_SERVICE_HULUXIA, Config.IMG_SERVICE_CATBOX, Config.IMG_SERVICE_360, Config.IMG_SERVICE_POSTIMAGES, Config.IMG_SERVICE_AI58, Config.IMG_SERVICE_GTIMG, Config.IMG_SERVICE_BKIMG, Config.IMG_SERVICE_MUKE, Config.IMG_SERVICE_UPYUN} if (flag in supportedService): return True return False
@classmethod def isValidImgServiceFlag(cls, flag: str) -> bool: 'ๆ˜ฏๅฆไธบๅˆๆณ•็š„ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†\n flag: ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†\n ' supportedService = {Config.IMG_SERVICE_SMMS, Config.IMG_SERVICE_ALI, Config.IMG_SERVICE_RRUU, Config.IMG_SERVICE_VIMCN, Config.IMG_SERVICE_YUJIAN, Config.IMG_SERVICE_ALI2, Config.IMG_SERVICE_QCLOUD, Config.IMG_SERVICE_QINIU, Config.IMG_SERVICE_BILIBILI, Config.IMG_SERVICE_SOUGOU, Config.IMG_SERVICE_HULUXIA, Config.IMG_SERVICE_CATBOX, Config.IMG_SERVICE_360, Config.IMG_SERVICE_POSTIMAGES, Config.IMG_SERVICE_AI58, Config.IMG_SERVICE_GTIMG, Config.IMG_SERVICE_BKIMG, Config.IMG_SERVICE_MUKE, Config.IMG_SERVICE_UPYUN} if (flag in supportedService): return True return False<|docstring|>ๆ˜ฏๅฆไธบๅˆๆณ•็š„ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ† flag: ๅ›พ็‰‡ๆœๅŠกๆ ‡่ฏ†<|endoftext|>
e0996eea3ca0d7a838b58db13e6952f59ce6b9b671f885ad0c4bb4496d5085da
def glInitFramebufferMultisampleBlitScaledEXT(): 'Return boolean indicating whether this extension is available' from OpenGL import extensions return extensions.hasGLExtension(_EXTENSION_NAME)
Return boolean indicating whether this extension is available
OpenGL/GL/EXT/framebuffer_multisample_blit_scaled.py
glInitFramebufferMultisampleBlitScaledEXT
keunhong/pyopengl
210
python
def glInitFramebufferMultisampleBlitScaledEXT(): from OpenGL import extensions return extensions.hasGLExtension(_EXTENSION_NAME)
def glInitFramebufferMultisampleBlitScaledEXT(): from OpenGL import extensions return extensions.hasGLExtension(_EXTENSION_NAME)<|docstring|>Return boolean indicating whether this extension is available<|endoftext|>
25f0c0ea1355985c9c33444873c8c6cc32e976dd4c3c17f9f02dba7eedddd805
def on_timeout(self, user_data): '\n Update value on the progress bar\n ' if self.activity_mode: self.progressbar.pulse() else: new_value = (self.progressbar.get_fraction() + 0.01) if (new_value > 1): new_value = 0 self.progressbar.set_fraction(new_value) return True
Update value on the progress bar
gym_pcgrl/gym_pcgrl/conditional_window.py
on_timeout
JiangZehua/control-pcgrl3D
15
python
def on_timeout(self, user_data): '\n \n ' if self.activity_mode: self.progressbar.pulse() else: new_value = (self.progressbar.get_fraction() + 0.01) if (new_value > 1): new_value = 0 self.progressbar.set_fraction(new_value) return True
def on_timeout(self, user_data): '\n \n ' if self.activity_mode: self.progressbar.pulse() else: new_value = (self.progressbar.get_fraction() + 0.01) if (new_value > 1): new_value = 0 self.progressbar.set_fraction(new_value) return True<|docstring|>Update value on the progress bar<|endoftext|>
34d8efa607b3b51b7943b7744b8e005312c8a6417025bb612b08dbff9e9c3da0
def parse_darts_log(log_path, key_point='ea_acc'): '\n report vaild\n ' collect = [] for l in open(log_path).readlines(): l = l.strip('/n') if ('args = Namespace' in l): collect = [] if (key_point in l): metirc = float(l.split(key_point)[(- 1)]) print(metirc) collect.append(metirc) print(collect)
report vaild
Model_speed/FLOPs.py
parse_darts_log
Yanjun-Chen/Python-Tools
1
python
def parse_darts_log(log_path, key_point='ea_acc'): '\n \n ' collect = [] for l in open(log_path).readlines(): l = l.strip('/n') if ('args = Namespace' in l): collect = [] if (key_point in l): metirc = float(l.split(key_point)[(- 1)]) print(metirc) collect.append(metirc) print(collect)
def parse_darts_log(log_path, key_point='ea_acc'): '\n \n ' collect = [] for l in open(log_path).readlines(): l = l.strip('/n') if ('args = Namespace' in l): collect = [] if (key_point in l): metirc = float(l.split(key_point)[(- 1)]) print(metirc) collect.append(metirc) print(collect)<|docstring|>report vaild<|endoftext|>
3196c93d357e21b267d381af0f8813cc4ee38c1e3a7810a9cfdd4abb13306b19
def parse_vs(log_path): '\n report vaild\n ' previous = [] current = [] for l in open(log_path).readlines(): l = l.strip('/n') if ('args = Namespace' in l): previous = [] current = [] if ('previous_vs_current' in l): import pdb pdb.set_trace() p = float(l.split('previous_vs_current')[0].split(' ')[(- 1)]) c = float(l.split('previous_vs_current')[(- 1)].split(' ')[0]) print(metirc) collect.append(metirc) print(collect)
report vaild
Model_speed/FLOPs.py
parse_vs
Yanjun-Chen/Python-Tools
1
python
def parse_vs(log_path): '\n \n ' previous = [] current = [] for l in open(log_path).readlines(): l = l.strip('/n') if ('args = Namespace' in l): previous = [] current = [] if ('previous_vs_current' in l): import pdb pdb.set_trace() p = float(l.split('previous_vs_current')[0].split(' ')[(- 1)]) c = float(l.split('previous_vs_current')[(- 1)].split(' ')[0]) print(metirc) collect.append(metirc) print(collect)
def parse_vs(log_path): '\n \n ' previous = [] current = [] for l in open(log_path).readlines(): l = l.strip('/n') if ('args = Namespace' in l): previous = [] current = [] if ('previous_vs_current' in l): import pdb pdb.set_trace() p = float(l.split('previous_vs_current')[0].split(' ')[(- 1)]) c = float(l.split('previous_vs_current')[(- 1)].split(' ')[0]) print(metirc) collect.append(metirc) print(collect)<|docstring|>report vaild<|endoftext|>
f85cd45012a2a2862327ddb6fc817c19495668c25385e43dc83e871ea610f569
def get_lantacy(arch=None, l_limit=8000, h_limit=15000): '\n only support sfn1 oneshot\n ' if (arch is None): arch = tuple((np.random.randint(4) for i in range(16))) assert (len(arch) == 16) lantacy_map = [[581.0, 741.0, 832.0, 1373.0], [450.0, 549.0, 781.0, 877.0], [402.0, 499.0, 515.0, 742.0], [473.0, 673.0, 647.0, 772.0], [550.0, 553.0, 739.0, 821.0], [450.0, 428.0, 551.0, 472.0], [271.0, 408.0, 405.0, 519.0], [342.0, 388.0, 472.0, 437.0], [347.0, 429.0, 483.0, 446.0], [309.0, 365.0, 481.0, 451.0], [425.0, 461.0, 495.0, 502.0], [276.0, 377.0, 434.0, 452.0], [391.0, 415.0, 413.0, 594.0], [197.0, 289.0, 274.0, 363.0], [148.0, 149.0, 301.0, 350.0], [238.0, 272.0, 221.0, 457.0]] stem = 4282 classifer = 408 limit = 12000 arch_lantacy = (stem + classifer) for (layer_id, ops_id) in enumerate(arch): arch_lantacy += lantacy_map[layer_id][ops_id] return (arch_lantacy, ((arch_lantacy < h_limit) and (arch_lantacy > l_limit)))
only support sfn1 oneshot
Model_speed/FLOPs.py
get_lantacy
Yanjun-Chen/Python-Tools
1
python
def get_lantacy(arch=None, l_limit=8000, h_limit=15000): '\n \n ' if (arch is None): arch = tuple((np.random.randint(4) for i in range(16))) assert (len(arch) == 16) lantacy_map = [[581.0, 741.0, 832.0, 1373.0], [450.0, 549.0, 781.0, 877.0], [402.0, 499.0, 515.0, 742.0], [473.0, 673.0, 647.0, 772.0], [550.0, 553.0, 739.0, 821.0], [450.0, 428.0, 551.0, 472.0], [271.0, 408.0, 405.0, 519.0], [342.0, 388.0, 472.0, 437.0], [347.0, 429.0, 483.0, 446.0], [309.0, 365.0, 481.0, 451.0], [425.0, 461.0, 495.0, 502.0], [276.0, 377.0, 434.0, 452.0], [391.0, 415.0, 413.0, 594.0], [197.0, 289.0, 274.0, 363.0], [148.0, 149.0, 301.0, 350.0], [238.0, 272.0, 221.0, 457.0]] stem = 4282 classifer = 408 limit = 12000 arch_lantacy = (stem + classifer) for (layer_id, ops_id) in enumerate(arch): arch_lantacy += lantacy_map[layer_id][ops_id] return (arch_lantacy, ((arch_lantacy < h_limit) and (arch_lantacy > l_limit)))
def get_lantacy(arch=None, l_limit=8000, h_limit=15000): '\n \n ' if (arch is None): arch = tuple((np.random.randint(4) for i in range(16))) assert (len(arch) == 16) lantacy_map = [[581.0, 741.0, 832.0, 1373.0], [450.0, 549.0, 781.0, 877.0], [402.0, 499.0, 515.0, 742.0], [473.0, 673.0, 647.0, 772.0], [550.0, 553.0, 739.0, 821.0], [450.0, 428.0, 551.0, 472.0], [271.0, 408.0, 405.0, 519.0], [342.0, 388.0, 472.0, 437.0], [347.0, 429.0, 483.0, 446.0], [309.0, 365.0, 481.0, 451.0], [425.0, 461.0, 495.0, 502.0], [276.0, 377.0, 434.0, 452.0], [391.0, 415.0, 413.0, 594.0], [197.0, 289.0, 274.0, 363.0], [148.0, 149.0, 301.0, 350.0], [238.0, 272.0, 221.0, 457.0]] stem = 4282 classifer = 408 limit = 12000 arch_lantacy = (stem + classifer) for (layer_id, ops_id) in enumerate(arch): arch_lantacy += lantacy_map[layer_id][ops_id] return (arch_lantacy, ((arch_lantacy < h_limit) and (arch_lantacy > l_limit)))<|docstring|>only support sfn1 oneshot<|endoftext|>
453606b32e291856ac04ecdf7429ae4dd7b8d12607698116fd85325c3c98cc25
def __init__(self): '\n Main execution path\n ' self.inventory = dict() self.hosts = dict() self.parse_cli_args() self.read_settings() if (self.args.refresh_cache or (not self.is_cache_valid())): self.update_cache() else: self.load_inventory_from_cache() self.load_hosts_from_cache() data_to_print = '' if self.args.host: if self.args.debug: print(('Fetching host [%s]' % self.args.host)) data_to_print += self.get_host_info(self.args.host) else: self.inventory['_meta'] = {'hostvars': {}} for hostname in self.hosts: self.inventory['_meta']['hostvars'][hostname] = {'cloudforms': self.hosts[hostname]} if ('ansible_ssh_host' in self.hosts[hostname]): self.inventory['_meta']['hostvars'][hostname]['ansible_ssh_host'] = self.hosts[hostname]['ansible_ssh_host'] data_to_print += self.json_format_dict(self.inventory, self.args.pretty) print(data_to_print)
Main execution path
awx/plugins/inventory/cloudforms.py
__init__
jmferrer/awx
37
python
def __init__(self): '\n \n ' self.inventory = dict() self.hosts = dict() self.parse_cli_args() self.read_settings() if (self.args.refresh_cache or (not self.is_cache_valid())): self.update_cache() else: self.load_inventory_from_cache() self.load_hosts_from_cache() data_to_print = if self.args.host: if self.args.debug: print(('Fetching host [%s]' % self.args.host)) data_to_print += self.get_host_info(self.args.host) else: self.inventory['_meta'] = {'hostvars': {}} for hostname in self.hosts: self.inventory['_meta']['hostvars'][hostname] = {'cloudforms': self.hosts[hostname]} if ('ansible_ssh_host' in self.hosts[hostname]): self.inventory['_meta']['hostvars'][hostname]['ansible_ssh_host'] = self.hosts[hostname]['ansible_ssh_host'] data_to_print += self.json_format_dict(self.inventory, self.args.pretty) print(data_to_print)
def __init__(self): '\n \n ' self.inventory = dict() self.hosts = dict() self.parse_cli_args() self.read_settings() if (self.args.refresh_cache or (not self.is_cache_valid())): self.update_cache() else: self.load_inventory_from_cache() self.load_hosts_from_cache() data_to_print = if self.args.host: if self.args.debug: print(('Fetching host [%s]' % self.args.host)) data_to_print += self.get_host_info(self.args.host) else: self.inventory['_meta'] = {'hostvars': {}} for hostname in self.hosts: self.inventory['_meta']['hostvars'][hostname] = {'cloudforms': self.hosts[hostname]} if ('ansible_ssh_host' in self.hosts[hostname]): self.inventory['_meta']['hostvars'][hostname]['ansible_ssh_host'] = self.hosts[hostname]['ansible_ssh_host'] data_to_print += self.json_format_dict(self.inventory, self.args.pretty) print(data_to_print)<|docstring|>Main execution path<|endoftext|>
a3be07e799d90c6cbb7eedffb12ca5d1b399bd68426aa7319da09cc77d6254e2
def is_cache_valid(self): '\n Determines if the cache files have expired, or if it is still valid\n ' if self.args.debug: print(('Determining if cache [%s] is still valid (< %s seconds old)' % (self.cache_path_hosts, self.cache_max_age))) if os.path.isfile(self.cache_path_hosts): mod_time = os.path.getmtime(self.cache_path_hosts) current_time = time() if ((mod_time + self.cache_max_age) > current_time): if os.path.isfile(self.cache_path_inventory): if self.args.debug: print('Cache is still valid!') return True if self.args.debug: print('Cache is stale or does not exist.') return False
Determines if the cache files have expired, or if it is still valid
awx/plugins/inventory/cloudforms.py
is_cache_valid
jmferrer/awx
37
python
def is_cache_valid(self): '\n \n ' if self.args.debug: print(('Determining if cache [%s] is still valid (< %s seconds old)' % (self.cache_path_hosts, self.cache_max_age))) if os.path.isfile(self.cache_path_hosts): mod_time = os.path.getmtime(self.cache_path_hosts) current_time = time() if ((mod_time + self.cache_max_age) > current_time): if os.path.isfile(self.cache_path_inventory): if self.args.debug: print('Cache is still valid!') return True if self.args.debug: print('Cache is stale or does not exist.') return False
def is_cache_valid(self): '\n \n ' if self.args.debug: print(('Determining if cache [%s] is still valid (< %s seconds old)' % (self.cache_path_hosts, self.cache_max_age))) if os.path.isfile(self.cache_path_hosts): mod_time = os.path.getmtime(self.cache_path_hosts) current_time = time() if ((mod_time + self.cache_max_age) > current_time): if os.path.isfile(self.cache_path_inventory): if self.args.debug: print('Cache is still valid!') return True if self.args.debug: print('Cache is stale or does not exist.') return False<|docstring|>Determines if the cache files have expired, or if it is still valid<|endoftext|>
1b3af32709674921911ce128378fbe69cac6ed1efbad7d53633a538dae58fd7d
def read_settings(self): '\n Reads the settings from the cloudforms.ini file\n ' config = ConfigParser.SafeConfigParser() config_paths = [(os.path.dirname(os.path.realpath(__file__)) + '/cloudforms.ini'), '/etc/ansible/cloudforms.ini'] env_value = os.environ.get('CLOUDFORMS_INI_PATH') if (env_value is not None): config_paths.append(os.path.expanduser(os.path.expandvars(env_value))) if self.args.debug: for config_path in config_paths: print(('Reading from configuration file [%s]' % config_path)) config.read(config_paths) if config.has_option('cloudforms', 'url'): self.cloudforms_url = config.get('cloudforms', 'url') else: self.cloudforms_url = None if (not self.cloudforms_url): warnings.warn("No url specified, expected something like 'https://cfme.example.com'") if config.has_option('cloudforms', 'username'): self.cloudforms_username = config.get('cloudforms', 'username') else: self.cloudforms_username = None if (not self.cloudforms_username): warnings.warn('No username specified, you need to specify a CloudForms username.') if config.has_option('cloudforms', 'password'): self.cloudforms_pw = config.get('cloudforms', 'password', raw=True) else: self.cloudforms_pw = None if (not self.cloudforms_pw): warnings.warn('No password specified, you need to specify a password for the CloudForms user.') if config.has_option('cloudforms', 'ssl_verify'): self.cloudforms_ssl_verify = config.getboolean('cloudforms', 'ssl_verify') else: self.cloudforms_ssl_verify = True if config.has_option('cloudforms', 'version'): self.cloudforms_version = config.get('cloudforms', 'version') else: self.cloudforms_version = None if config.has_option('cloudforms', 'limit'): self.cloudforms_limit = config.getint('cloudforms', 'limit') else: self.cloudforms_limit = 100 if config.has_option('cloudforms', 'purge_actions'): self.cloudforms_purge_actions = config.getboolean('cloudforms', 'purge_actions') else: self.cloudforms_purge_actions = True if config.has_option('cloudforms', 'clean_group_keys'): 
self.cloudforms_clean_group_keys = config.getboolean('cloudforms', 'clean_group_keys') else: self.cloudforms_clean_group_keys = True if config.has_option('cloudforms', 'nest_tags'): self.cloudforms_nest_tags = config.getboolean('cloudforms', 'nest_tags') else: self.cloudforms_nest_tags = False if config.has_option('cloudforms', 'suffix'): self.cloudforms_suffix = config.get('cloudforms', 'suffix') if (self.cloudforms_suffix[0] != '.'): raise AnsibleError('Leading fullstop is required for Cloudforms suffix') else: self.cloudforms_suffix = None if config.has_option('cloudforms', 'prefer_ipv4'): self.cloudforms_prefer_ipv4 = config.getboolean('cloudforms', 'prefer_ipv4') else: self.cloudforms_prefer_ipv4 = False try: group_patterns = config.get('ansible', 'group_patterns') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): group_patterns = '[]' self.group_patterns = eval(group_patterns) try: cache_path = os.path.expanduser(config.get('cache', 'path')) except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): cache_path = '.' (script, ext) = os.path.splitext(os.path.basename(__file__)) self.cache_path_hosts = (cache_path + ('/%s.hosts' % script)) self.cache_path_inventory = (cache_path + ('/%s.inventory' % script)) self.cache_max_age = config.getint('cache', 'max_age') if self.args.debug: print('CloudForms settings:') print(('cloudforms_url = %s' % self.cloudforms_url)) print(('cloudforms_username = %s' % self.cloudforms_username)) print(('cloudforms_pw = %s' % self.cloudforms_pw)) print(('cloudforms_ssl_verify = %s' % self.cloudforms_ssl_verify)) print(('cloudforms_version = %s' % self.cloudforms_version)) print(('cloudforms_limit = %s' % self.cloudforms_limit)) print(('cloudforms_purge_actions = %s' % self.cloudforms_purge_actions)) print('Cache settings:') print(('cache_max_age = %s' % self.cache_max_age)) print(('cache_path_hosts = %s' % self.cache_path_hosts)) print(('cache_path_inventory = %s' % self.cache_path_inventory))
Reads the settings from the cloudforms.ini file
awx/plugins/inventory/cloudforms.py
read_settings
jmferrer/awx
37
python
def read_settings(self):
    """Read the settings from the cloudforms.ini file.

    Looks for cloudforms.ini next to this script, in /etc/ansible/, and at
    the path named by $CLOUDFORMS_INI_PATH, then populates the connection,
    behaviour and cache attributes on ``self``, warning about missing
    credentials and falling back to defaults for optional values.
    """
    config = ConfigParser.SafeConfigParser()
    # Search order: alongside this script first, then the system-wide path.
    config_paths = [(os.path.dirname(os.path.realpath(__file__)) + '/cloudforms.ini'), '/etc/ansible/cloudforms.ini']
    # An explicit override via environment variable is appended last; with
    # ConfigParser.read(), later files override earlier ones, so it wins.
    env_value = os.environ.get('CLOUDFORMS_INI_PATH')
    if (env_value is not None):
        config_paths.append(os.path.expanduser(os.path.expandvars(env_value)))
    if self.args.debug:
        for config_path in config_paths:
            print(('Reading from configuration file [%s]' % config_path))
    config.read(config_paths)
    # --- CloudForms connection settings ---------------------------------
    if config.has_option('cloudforms', 'url'):
        self.cloudforms_url = config.get('cloudforms', 'url')
    else:
        self.cloudforms_url = None
    if (not self.cloudforms_url):
        warnings.warn("No url specified, expected something like 'https://cfme.example.com'")
    if config.has_option('cloudforms', 'username'):
        self.cloudforms_username = config.get('cloudforms', 'username')
    else:
        self.cloudforms_username = None
    if (not self.cloudforms_username):
        warnings.warn('No username specified, you need to specify a CloudForms username.')
    if config.has_option('cloudforms', 'password'):
        # raw=True so '%' characters in the password are not interpolated.
        self.cloudforms_pw = config.get('cloudforms', 'password', raw=True)
    else:
        self.cloudforms_pw = None
    if (not self.cloudforms_pw):
        warnings.warn('No password specified, you need to specify a password for the CloudForms user.')
    if config.has_option('cloudforms', 'ssl_verify'):
        self.cloudforms_ssl_verify = config.getboolean('cloudforms', 'ssl_verify')
    else:
        self.cloudforms_ssl_verify = True
    if config.has_option('cloudforms', 'version'):
        self.cloudforms_version = config.get('cloudforms', 'version')
    else:
        self.cloudforms_version = None
    # Page size used when listing VMs from the API (see _get_hosts).
    if config.has_option('cloudforms', 'limit'):
        self.cloudforms_limit = config.getint('cloudforms', 'limit')
    else:
        self.cloudforms_limit = 100
    if config.has_option('cloudforms', 'purge_actions'):
        self.cloudforms_purge_actions = config.getboolean('cloudforms', 'purge_actions')
    else:
        self.cloudforms_purge_actions = True
    if config.has_option('cloudforms', 'clean_group_keys'):
        self.cloudforms_clean_group_keys = config.getboolean('cloudforms', 'clean_group_keys')
    else:
        self.cloudforms_clean_group_keys = True
    if config.has_option('cloudforms', 'nest_tags'):
        self.cloudforms_nest_tags = config.getboolean('cloudforms', 'nest_tags')
    else:
        self.cloudforms_nest_tags = False
    if config.has_option('cloudforms', 'suffix'):
        self.cloudforms_suffix = config.get('cloudforms', 'suffix')
        # A suffix is appended to host names verbatim, so require the dot.
        if (self.cloudforms_suffix[0] != '.'):
            raise AnsibleError('Leading fullstop is required for Cloudforms suffix')
    else:
        self.cloudforms_suffix = None
    if config.has_option('cloudforms', 'prefer_ipv4'):
        self.cloudforms_prefer_ipv4 = config.getboolean('cloudforms', 'prefer_ipv4')
    else:
        self.cloudforms_prefer_ipv4 = False
    # --- Ansible group patterns -----------------------------------------
    try:
        group_patterns = config.get('ansible', 'group_patterns')
    except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
        group_patterns = '[]'
    # NOTE(review): eval() on a config value executes arbitrary code if the
    # ini file is attacker-writable; ast.literal_eval would be safer here.
    self.group_patterns = eval(group_patterns)
    # --- Cache settings --------------------------------------------------
    try:
        cache_path = os.path.expanduser(config.get('cache', 'path'))
    except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
        cache_path = '.'
    # Cache file names are derived from this script's own base name.
    (script, ext) = os.path.splitext(os.path.basename(__file__))
    self.cache_path_hosts = (cache_path + ('/%s.hosts' % script))
    self.cache_path_inventory = (cache_path + ('/%s.inventory' % script))
    # NOTE(review): getint() has no fallback, so a missing [cache] max_age
    # option raises -- confirm the shipped ini always provides it.
    self.cache_max_age = config.getint('cache', 'max_age')
    if self.args.debug:
        print('CloudForms settings:')
        print(('cloudforms_url = %s' % self.cloudforms_url))
        print(('cloudforms_username = %s' % self.cloudforms_username))
        print(('cloudforms_pw = %s' % self.cloudforms_pw))
        print(('cloudforms_ssl_verify = %s' % self.cloudforms_ssl_verify))
        print(('cloudforms_version = %s' % self.cloudforms_version))
        print(('cloudforms_limit = %s' % self.cloudforms_limit))
        print(('cloudforms_purge_actions = %s' % self.cloudforms_purge_actions))
        print('Cache settings:')
        print(('cache_max_age = %s' % self.cache_max_age))
        print(('cache_path_hosts = %s' % self.cache_path_hosts))
        print(('cache_path_inventory = %s' % self.cache_path_inventory))
def parse_cli_args(self):
    """Define and parse the command-line options, storing them on ``self.args``."""
    cli = argparse.ArgumentParser(
        description='Produce an Ansible Inventory file based on CloudForms managed VMs')
    cli.add_argument('--list', action='store_true', default=True,
                     help='List instances (default: True)')
    cli.add_argument('--host', action='store',
                     help='Get all the variables about a specific instance')
    cli.add_argument('--pretty', action='store_true', default=False,
                     help='Pretty print JSON output (default: False)')
    cli.add_argument('--refresh-cache', action='store_true', default=False,
                     help='Force refresh of cache by making API requests to CloudForms (default: False - use cache files)')
    cli.add_argument('--debug', action='store_true', default=False,
                     help='Show debug output while running (default: False)')
    self.args = cli.parse_args()
def _get_json(self, url):
    """GET *url* with basic auth and return the decoded JSON body.

    Raises for non-2xx responses (via ``raise_for_status``). If the body is
    not valid JSON, emits a warning and returns an empty dict instead.
    """
    response = requests.get(url,
                            auth=HTTPBasicAuth(self.cloudforms_username, self.cloudforms_pw),
                            verify=self.cloudforms_ssl_verify)
    response.raise_for_status()
    try:
        parsed = json.loads(response.text)
    except ValueError:
        warnings.warn('Unexpected response from {0} ({1}): {2}'.format(
            self.cloudforms_url, response.status_code, response.reason))
        parsed = {}
    if self.args.debug:
        # Dump the raw payload between eye-catching banners.
        banner = '======================================================================='
        for _ in range(3):
            print(banner)
        print(response.text)
        for _ in range(3):
            print(banner)
    return parsed
290f7292e9bc6e4197b6b14a4c189b47a9263da9cdb7340f25db1cbc03a13113
def _get_hosts(self): '\n Get all hosts by paging through the results\n ' limit = self.cloudforms_limit page = 0 last_page = False results = [] while (not last_page): offset = (page * limit) ret = self._get_json(('%s/api/vms?offset=%s&limit=%s&expand=resources,tags,hosts,&attributes=ipaddresses' % (self.cloudforms_url, offset, limit))) results += ret['resources'] if (ret['subcount'] < limit): last_page = True page += 1 return results
Get all hosts by paging through the results
awx/plugins/inventory/cloudforms.py
_get_hosts
jmferrer/awx
37
python
def _get_hosts(self): '\n \n ' limit = self.cloudforms_limit page = 0 last_page = False results = [] while (not last_page): offset = (page * limit) ret = self._get_json(('%s/api/vms?offset=%s&limit=%s&expand=resources,tags,hosts,&attributes=ipaddresses' % (self.cloudforms_url, offset, limit))) results += ret['resources'] if (ret['subcount'] < limit): last_page = True page += 1 return results
def _get_hosts(self): '\n \n ' limit = self.cloudforms_limit page = 0 last_page = False results = [] while (not last_page): offset = (page * limit) ret = self._get_json(('%s/api/vms?offset=%s&limit=%s&expand=resources,tags,hosts,&attributes=ipaddresses' % (self.cloudforms_url, offset, limit))) results += ret['resources'] if (ret['subcount'] < limit): last_page = True page += 1 return results<|docstring|>Get all hosts by paging through the results<|endoftext|>
def update_cache(self):
    """Make calls to cloudforms and save the output in a cache.

    Fetches every powered-on VM from the API, builds the group/host
    structures on ``self.inventory`` and ``self.hosts``, and writes both
    to the JSON cache files.
    """
    self.groups = dict()
    self.hosts = dict()
    if self.args.debug:
        print('Updating cache...')
    for host in self._get_hosts():
        # Optionally force a DNS-style suffix onto every host name.
        if ((self.cloudforms_suffix is not None) and (not host['name'].endswith(self.cloudforms_suffix))):
            host['name'] = (host['name'] + self.cloudforms_suffix)
        # Powered-off VMs are not useful inventory targets; skip them.
        if (host['power_state'] != 'on'):
            if self.args.debug:
                print(('Skipping %s because power_state = %s' % (host['name'], host['power_state'])))
            continue
        # The 'actions' payload is API metadata; drop it to slim the cache.
        if (self.cloudforms_purge_actions and ('actions' in host)):
            del host['actions']
        if ('tags' in host):
            if ('tags' not in self.inventory):
                self.inventory['tags'] = dict(children=[], vars={}, hosts=[])
            if (not self.cloudforms_nest_tags):
                # Flat mode: each tag becomes a direct child group of 'tags'.
                for group in host['tags']:
                    safe_key = self.to_safe(group['name'])
                    if safe_key:
                        if self.args.debug:
                            print(("Adding sub-group '%s' to parent 'tags'" % safe_key))
                        if (safe_key not in self.inventory['tags']['children']):
                            self.push(self.inventory['tags'], 'children', safe_key)
                        self.push(self.inventory, safe_key, host['name'])
                        if self.args.debug:
                            print(('Found tag [%s] for host which will be mapped to [%s]' % (group['name'], safe_key)))
            else:
                # Nested mode: a tag name is split on '/' into a chain of
                # parent/child groups, with the host attached to the leaf.
                safe_parent_tag_name = 'tags'
                for tag in host['tags']:
                    # Drop the leading character before splitting -- assumes
                    # tag names start with '/' (TODO confirm with API docs).
                    tag_hierarchy = tag['name'][1:].split('/')
                    if self.args.debug:
                        print(('Working on list %s' % tag_hierarchy))
                    for tag_name in tag_hierarchy:
                        if self.args.debug:
                            print(('Working on tag_name = %s' % tag_name))
                        safe_tag_name = self.to_safe(tag_name)
                        if self.args.debug:
                            print(('Using sanitized name %s' % safe_tag_name))
                        if (safe_tag_name not in self.inventory):
                            self.inventory[safe_tag_name] = dict(children=[], vars={}, hosts=[])
                        if safe_parent_tag_name:
                            if self.args.debug:
                                print(("Adding sub-group '%s' to parent '%s'" % (safe_tag_name, safe_parent_tag_name)))
                            if (safe_tag_name not in self.inventory[safe_parent_tag_name]['children']):
                                self.push(self.inventory[safe_parent_tag_name], 'children', safe_tag_name)
                        safe_parent_tag_name = safe_tag_name
                    # Attach the host to the deepest tag group reached above.
                    self.push(self.inventory[safe_parent_tag_name], 'hosts', host['name'])
        if (('ipaddresses' in host) and host['ipaddresses'] and isinstance(host['ipaddresses'], list)):
            if (not self.cloudforms_prefer_ipv4):
                host['ansible_ssh_host'] = host['ipaddresses'][0]
            else:
                # Fall back to the first address, then overwrite with each
                # address containing a dot, so the last dotted (IPv4-style)
                # entry wins.
                host['ansible_ssh_host'] = host['ipaddresses'][0]
                for currenthost in host['ipaddresses']:
                    if ('.' in currenthost):
                        host['ansible_ssh_host'] = currenthost
        # Group hosts by a few fixed attributes as parent/child groups.
        for key in ('location', 'type', 'vendor'):
            # NOTE(review): host[key] is read unguarded here while the push
            # below checks 'key in host' -- a host missing one of these
            # attributes would raise KeyError first. Confirm the API always
            # returns them.
            safe_key = self.to_safe(host[key])
            if (key not in self.inventory):
                self.inventory[key] = dict(children=[], vars={}, hosts=[])
            if (safe_key not in self.inventory):
                self.inventory[safe_key] = dict(children=[], vars={}, hosts=[])
            if (safe_key not in self.inventory[key]['children']):
                self.push(self.inventory[key], 'children', safe_key)
            if (key in host):
                self.push(self.inventory[safe_key], 'hosts', host['name'])
        self.hosts[host['name']] = host
        self.push(self.inventory, 'all', host['name'])
    if self.args.debug:
        print('Saving cached data')
    self.write_to_cache(self.hosts, self.cache_path_hosts)
    self.write_to_cache(self.inventory, self.cache_path_inventory)
def get_host_info(self, host):
    """Return the JSON-encoded variables for a single *host*.

    Loads the host cache from disk if it is empty, refreshes it from the
    API when the host is unknown, and returns '{}' if it still cannot be
    found.
    """
    if not self.hosts:
        # Nothing in memory yet -- hydrate from the on-disk cache first.
        self.load_hosts_from_cache()
    if host not in self.hosts:
        if self.args.debug:
            print('[%s] not found in cache.' % host)
        # The host may be new since the last refresh; rebuild the cache.
        self.update_cache()
        if host not in self.hosts:
            if self.args.debug:
                print('[%s] does not exist after cache update.' % host)
            return self.json_format_dict({}, self.args.pretty)
    return self.json_format_dict(self.hosts[host], self.args.pretty)
def push(self, d, k, v):
    """Append *v* to the list stored under ``d[k]``, creating it if absent."""
    d.setdefault(k, []).append(v)
def load_inventory_from_cache(self):
    """Read the inventory cache file and set ``self.inventory``.

    Reads JSON from ``self.cache_path_inventory``. Raises IOError/OSError
    if the file is missing and ValueError if it contains invalid JSON.
    """
    # Use a context manager so the file handle is closed even if
    # json.loads() raises; the original opened it and never closed it.
    with open(self.cache_path_inventory, 'r') as cache:
        self.inventory = json.loads(cache.read())
def load_hosts_from_cache(self):
    """Read the hosts cache file and set ``self.hosts``.

    Reads JSON from ``self.cache_path_hosts``. Raises IOError/OSError if
    the file is missing and ValueError if it contains invalid JSON.
    """
    # Use a context manager so the file handle is closed even if
    # json.loads() raises; the original opened it and never closed it.
    with open(self.cache_path_hosts, 'r') as cache:
        self.hosts = json.loads(cache.read())
def write_to_cache(self, data, filename):
    """Serialize *data* to pretty-printed JSON and write it to *filename*.

    Uses ``self.json_format_dict(data, True)`` for the encoding, so the
    cache files on disk are always human-readable.
    """
    json_data = self.json_format_dict(data, True)
    # Context manager guarantees the file is flushed and closed even if
    # write() raises part-way through (the original only closed on success).
    with open(filename, 'w') as cache:
        cache.write(json_data)
def to_safe(self, word):
    """Sanitize *word* for use as an Ansible group name.

    When ``cloudforms_clean_group_keys`` is enabled, spaces are removed and
    every other character outside [A-Za-z0-9_] is replaced by an
    underscore; otherwise *word* is returned untouched.
    """
    if not self.cloudforms_clean_group_keys:
        return word
    # Strip spaces first, then map remaining 'bad' characters to '_'.
    return re.sub(r'[^A-Za-z0-9\_]', '_', word.replace(' ', ''))
def json_format_dict(self, data, pretty=False):
    """Serialize *data* to a JSON string, sorted and indented when *pretty*."""
    if not pretty:
        return json.dumps(data)
    return json.dumps(data, sort_keys=True, indent=2)
def gnome_sort(arr: list[int]):
    """Sort *arr* in place by walking each new element left into position,
    plotting the array once per outer step.

    ## Complexities:
    ```py
    Worst Case Time Complexity == O(n * n)
    Average Case Time Complexity == O(n * n)
    Best Case Time Complexity == O(n)
    Space Complexity == O(1) Auxiliary
    ```
    """
    for upper in range(1, len(arr)):
        cursor: int = upper
        # Swap the element leftwards until it is no smaller than its
        # left-hand neighbour.
        while cursor > 0 and arr[cursor - 1] > arr[cursor]:
            arr[cursor - 1], arr[cursor] = arr[cursor], arr[cursor - 1]
            cursor -= 1
        plot(upper, arr, other_highlights=[cursor])
def onLoad(self):
    """Plugin load hook: resolve the NMDC parser and initialise limits."""
    # Look the parser plugin up once so later handlers can reuse it.
    self.parse = self.core.findPlugin('NMDC Parser')
    # NOTE(review): presumably caps Google searches per request -- confirm
    # against the command handlers that read this attribute.
    self.googleLimit = 2
4c4e692f2e8d1b7ee9503f0da22543738d0f47425bdd549f0e2c10fb682c9b56
def __init__(self, remote: str=None, username: str=None, password: str=None, cache: str=None, signoff: Optional[bool]=None, exists: bool=True, log_level: int=logging.INFO): "\n Initialize a new instance of :class:`GitIndex`.\n\n :param remote: Remote repository's address where the index is maintained.\n :param username: Username for credentials if protocol is not ssh.\n :param password: Password for credentials if protocol is not ssh.\n :param cache: Path to the folder where the repo will be cached, defaults to `~/.cache`.\n :param signoff: Whether to add a DCO to the commit message.\n :param exists: Whether the Git remote exists or not. If it doesn't, we are initializing (allows to catch some errors).\n :param log_level: The logging level of this instance.\n :raise ValueError: If missing credential, incorrect url, incorrect credentials or index JSON file is not found/unreadable.\n " self._log = logging.getLogger(type(self).__name__) self._log.setLevel(log_level) if (remote is None): remote = config.INDEX_REPO if (cache is None): cache = config.vendor_cache_dir() if (not signoff): signoff = config.ALWAYS_SIGNOFF self.signoff = signoff parsed_url = urlparse(remote) errmsg = ('Invalid index URL: "%s"' % remote) if ((not parsed_url.scheme) or (parsed_url.scheme not in ('git', 'git+ssh', 'ssh', 'http', 'https'))): self._log.critical('Parsed URL does not contain a valid protocol') raise ValueError(errmsg) if (not parsed_url.netloc): self._log.critical('Parsed URL does not contain a valid domain') raise ValueError(errmsg) if (not parsed_url.path): self._log.critical('Parsed URL does not contain a valid repository path') raise ValueError(errmsg) self.repo = parsed_url.path if self.repo.startswith('/'): self.repo = self.repo[1:] if self.repo.endswith('.git'): self.repo = self.repo[:(- 4)] self.cached_repo = os.path.join(cache, self.repo) if (username and password): auth = (((username + ':') + password) + '@') self.remote_url = (self.REMOTE_URL % (parsed_url.scheme, auth, 
parsed_url.netloc, self.repo)) elif ((username is None) != (password is None)): msg = ('Both username and password must be supplied to access "%s"' % remote) self._log.critical(msg) raise ValueError(msg) else: self.remote_url = remote self.contents = {} try: self.fetch() except NotGitRepository as e: self._log.critical(('Repository does not exist: %s' % e)) raise ValueError from e except HangupException as e: self._log.critical(('Check SSH is configured, or connection is stable: %s' % e)) raise ValueError from e except GitProtocolError as e: self._log.critical(('%s: %s\nCheck your Git credentials' % (type(e), e))) raise ValueError from e except (FileNotFoundError, ValueError) as e: if exists: self._log.critical('%s does not exist or is unreadable, please run `init` command', self.INDEX_FILE) raise ValueError from e self.models = self.contents.get('models', {}) self.meta = self.contents.get('meta', {})
Initialize a new instance of :class:`GitIndex`. :param remote: Remote repository's address where the index is maintained. :param username: Username for credentials if protocol is not ssh. :param password: Password for credentials if protocol is not ssh. :param cache: Path to the folder where the repo will be cached, defaults to `~/.cache`. :param signoff: Whether to add a DCO to the commit message. :param exists: Whether the Git remote exists or not. If it doesn't, we are initializing (allows to catch some errors). :param log_level: The logging level of this instance. :raise ValueError: If missing credential, incorrect url, incorrect credentials or index JSON file is not found/unreadable.
modelforge/index.py
__init__
src-d/modelforge
19
python
def __init__(self, remote: str=None, username: str=None, password: str=None, cache: str=None, signoff: Optional[bool]=None, exists: bool=True, log_level: int=logging.INFO): "\n Initialize a new instance of :class:`GitIndex`.\n\n :param remote: Remote repository's address where the index is maintained.\n :param username: Username for credentials if protocol is not ssh.\n :param password: Password for credentials if protocol is not ssh.\n :param cache: Path to the folder where the repo will be cached, defaults to `~/.cache`.\n :param signoff: Whether to add a DCO to the commit message.\n :param exists: Whether the Git remote exists or not. If it doesn't, we are initializing (allows to catch some errors).\n :param log_level: The logging level of this instance.\n :raise ValueError: If missing credential, incorrect url, incorrect credentials or index JSON file is not found/unreadable.\n " self._log = logging.getLogger(type(self).__name__) self._log.setLevel(log_level) if (remote is None): remote = config.INDEX_REPO if (cache is None): cache = config.vendor_cache_dir() if (not signoff): signoff = config.ALWAYS_SIGNOFF self.signoff = signoff parsed_url = urlparse(remote) errmsg = ('Invalid index URL: "%s"' % remote) if ((not parsed_url.scheme) or (parsed_url.scheme not in ('git', 'git+ssh', 'ssh', 'http', 'https'))): self._log.critical('Parsed URL does not contain a valid protocol') raise ValueError(errmsg) if (not parsed_url.netloc): self._log.critical('Parsed URL does not contain a valid domain') raise ValueError(errmsg) if (not parsed_url.path): self._log.critical('Parsed URL does not contain a valid repository path') raise ValueError(errmsg) self.repo = parsed_url.path if self.repo.startswith('/'): self.repo = self.repo[1:] if self.repo.endswith('.git'): self.repo = self.repo[:(- 4)] self.cached_repo = os.path.join(cache, self.repo) if (username and password): auth = (((username + ':') + password) + '@') self.remote_url = (self.REMOTE_URL % (parsed_url.scheme, auth, 
parsed_url.netloc, self.repo)) elif ((username is None) != (password is None)): msg = ('Both username and password must be supplied to access "%s"' % remote) self._log.critical(msg) raise ValueError(msg) else: self.remote_url = remote self.contents = {} try: self.fetch() except NotGitRepository as e: self._log.critical(('Repository does not exist: %s' % e)) raise ValueError from e except HangupException as e: self._log.critical(('Check SSH is configured, or connection is stable: %s' % e)) raise ValueError from e except GitProtocolError as e: self._log.critical(('%s: %s\nCheck your Git credentials' % (type(e), e))) raise ValueError from e except (FileNotFoundError, ValueError) as e: if exists: self._log.critical('%s does not exist or is unreadable, please run `init` command', self.INDEX_FILE) raise ValueError from e self.models = self.contents.get('models', {}) self.meta = self.contents.get('meta', {})
def __init__(self, remote: str=None, username: str=None, password: str=None, cache: str=None, signoff: Optional[bool]=None, exists: bool=True, log_level: int=logging.INFO): "\n Initialize a new instance of :class:`GitIndex`.\n\n :param remote: Remote repository's address where the index is maintained.\n :param username: Username for credentials if protocol is not ssh.\n :param password: Password for credentials if protocol is not ssh.\n :param cache: Path to the folder where the repo will be cached, defaults to `~/.cache`.\n :param signoff: Whether to add a DCO to the commit message.\n :param exists: Whether the Git remote exists or not. If it doesn't, we are initializing (allows to catch some errors).\n :param log_level: The logging level of this instance.\n :raise ValueError: If missing credential, incorrect url, incorrect credentials or index JSON file is not found/unreadable.\n " self._log = logging.getLogger(type(self).__name__) self._log.setLevel(log_level) if (remote is None): remote = config.INDEX_REPO if (cache is None): cache = config.vendor_cache_dir() if (not signoff): signoff = config.ALWAYS_SIGNOFF self.signoff = signoff parsed_url = urlparse(remote) errmsg = ('Invalid index URL: "%s"' % remote) if ((not parsed_url.scheme) or (parsed_url.scheme not in ('git', 'git+ssh', 'ssh', 'http', 'https'))): self._log.critical('Parsed URL does not contain a valid protocol') raise ValueError(errmsg) if (not parsed_url.netloc): self._log.critical('Parsed URL does not contain a valid domain') raise ValueError(errmsg) if (not parsed_url.path): self._log.critical('Parsed URL does not contain a valid repository path') raise ValueError(errmsg) self.repo = parsed_url.path if self.repo.startswith('/'): self.repo = self.repo[1:] if self.repo.endswith('.git'): self.repo = self.repo[:(- 4)] self.cached_repo = os.path.join(cache, self.repo) if (username and password): auth = (((username + ':') + password) + '@') self.remote_url = (self.REMOTE_URL % (parsed_url.scheme, auth, 
parsed_url.netloc, self.repo)) elif ((username is None) != (password is None)): msg = ('Both username and password must be supplied to access "%s"' % remote) self._log.critical(msg) raise ValueError(msg) else: self.remote_url = remote self.contents = {} try: self.fetch() except NotGitRepository as e: self._log.critical(('Repository does not exist: %s' % e)) raise ValueError from e except HangupException as e: self._log.critical(('Check SSH is configured, or connection is stable: %s' % e)) raise ValueError from e except GitProtocolError as e: self._log.critical(('%s: %s\nCheck your Git credentials' % (type(e), e))) raise ValueError from e except (FileNotFoundError, ValueError) as e: if exists: self._log.critical('%s does not exist or is unreadable, please run `init` command', self.INDEX_FILE) raise ValueError from e self.models = self.contents.get('models', {}) self.meta = self.contents.get('meta', {})<|docstring|>Initialize a new instance of :class:`GitIndex`. :param remote: Remote repository's address where the index is maintained. :param username: Username for credentials if protocol is not ssh. :param password: Password for credentials if protocol is not ssh. :param cache: Path to the folder where the repo will be cached, defaults to `~/.cache`. :param signoff: Whether to add a DCO to the commit message. :param exists: Whether the Git remote exists or not. If it doesn't, we are initializing (allows to catch some errors). :param log_level: The logging level of this instance. :raise ValueError: If missing credential, incorrect url, incorrect credentials or index JSON file is not found/unreadable.<|endoftext|>
5fba859c208e29badeb472b93ecba6ab9578fc979f25c3b23e6ebcb81ef239a4
def fetch(self): 'Load from the associated Git repository.' os.makedirs(os.path.dirname(self.cached_repo), exist_ok=True) if (not os.path.exists(self.cached_repo)): self._log.warning('Index not found, caching %s in %s', self.repo, self.cached_repo) git.clone(self.remote_url, self.cached_repo, checkout=True) else: self._log.debug('Index is cached in %s', self.cached_repo) try: diff = self._are_local_and_remote_heads_different() except Exception as e: self._log.warning('There was a problem with reading the cached index so cloning from scratch: %s: %s', type(e).__name__, e) shutil.rmtree(self.cached_repo) self.fetch() return if diff: self._log.info('Cached index is not up to date, pulling %s', self.repo) git.pull(self.cached_repo, self.remote_url) with open(os.path.join(self.cached_repo, self.INDEX_FILE), encoding='utf-8') as _in: self.contents = json.load(_in)
Load from the associated Git repository.
modelforge/index.py
fetch
src-d/modelforge
19
python
def fetch(self): os.makedirs(os.path.dirname(self.cached_repo), exist_ok=True) if (not os.path.exists(self.cached_repo)): self._log.warning('Index not found, caching %s in %s', self.repo, self.cached_repo) git.clone(self.remote_url, self.cached_repo, checkout=True) else: self._log.debug('Index is cached in %s', self.cached_repo) try: diff = self._are_local_and_remote_heads_different() except Exception as e: self._log.warning('There was a problem with reading the cached index so cloning from scratch: %s: %s', type(e).__name__, e) shutil.rmtree(self.cached_repo) self.fetch() return if diff: self._log.info('Cached index is not up to date, pulling %s', self.repo) git.pull(self.cached_repo, self.remote_url) with open(os.path.join(self.cached_repo, self.INDEX_FILE), encoding='utf-8') as _in: self.contents = json.load(_in)
def fetch(self): os.makedirs(os.path.dirname(self.cached_repo), exist_ok=True) if (not os.path.exists(self.cached_repo)): self._log.warning('Index not found, caching %s in %s', self.repo, self.cached_repo) git.clone(self.remote_url, self.cached_repo, checkout=True) else: self._log.debug('Index is cached in %s', self.cached_repo) try: diff = self._are_local_and_remote_heads_different() except Exception as e: self._log.warning('There was a problem with reading the cached index so cloning from scratch: %s: %s', type(e).__name__, e) shutil.rmtree(self.cached_repo) self.fetch() return if diff: self._log.info('Cached index is not up to date, pulling %s', self.repo) git.pull(self.cached_repo, self.remote_url) with open(os.path.join(self.cached_repo, self.INDEX_FILE), encoding='utf-8') as _in: self.contents = json.load(_in)<|docstring|>Load from the associated Git repository.<|endoftext|>
8e4b22695eec2da3d1b6446a581c38f6ff3314a04c52a6eb80566790b110bc81
def remove_model(self, model_uuid: str) -> dict: 'Delete the model from the registry. Call `upload()` to update the remote side.' model_type = None for (key, val) in self.models.items(): if (model_uuid in val): self._log.info('Found %s among %s models', model_uuid, key) model_type = key break if (model_type is None): self._log.error('Model not found, aborted') raise ValueError model_directory = os.path.join(self.cached_repo, model_type) model_node = self.models[model_type] meta_node = self.meta[model_type] if (len(model_node) == 1): self.models.pop(model_type) self.meta.pop(model_type) paths = [os.path.join(model_directory, model) for model in os.listdir(model_directory)] else: if (meta_node['default'] == model_uuid): self._log.info('Model is set as default, removing from index ...') meta_node['default'] = '' model_node.pop(model_uuid) paths = [os.path.join(model_directory, (model_uuid + '.md'))] git.remove(self.cached_repo, paths) return {'model': model_type, 'uuid': model_uuid}
Delete the model from the registry. Call `upload()` to update the remote side.
modelforge/index.py
remove_model
src-d/modelforge
19
python
def remove_model(self, model_uuid: str) -> dict: model_type = None for (key, val) in self.models.items(): if (model_uuid in val): self._log.info('Found %s among %s models', model_uuid, key) model_type = key break if (model_type is None): self._log.error('Model not found, aborted') raise ValueError model_directory = os.path.join(self.cached_repo, model_type) model_node = self.models[model_type] meta_node = self.meta[model_type] if (len(model_node) == 1): self.models.pop(model_type) self.meta.pop(model_type) paths = [os.path.join(model_directory, model) for model in os.listdir(model_directory)] else: if (meta_node['default'] == model_uuid): self._log.info('Model is set as default, removing from index ...') meta_node['default'] = model_node.pop(model_uuid) paths = [os.path.join(model_directory, (model_uuid + '.md'))] git.remove(self.cached_repo, paths) return {'model': model_type, 'uuid': model_uuid}
def remove_model(self, model_uuid: str) -> dict: model_type = None for (key, val) in self.models.items(): if (model_uuid in val): self._log.info('Found %s among %s models', model_uuid, key) model_type = key break if (model_type is None): self._log.error('Model not found, aborted') raise ValueError model_directory = os.path.join(self.cached_repo, model_type) model_node = self.models[model_type] meta_node = self.meta[model_type] if (len(model_node) == 1): self.models.pop(model_type) self.meta.pop(model_type) paths = [os.path.join(model_directory, model) for model in os.listdir(model_directory)] else: if (meta_node['default'] == model_uuid): self._log.info('Model is set as default, removing from index ...') meta_node['default'] = model_node.pop(model_uuid) paths = [os.path.join(model_directory, (model_uuid + '.md'))] git.remove(self.cached_repo, paths) return {'model': model_type, 'uuid': model_uuid}<|docstring|>Delete the model from the registry. Call `upload()` to update the remote side.<|endoftext|>
60bd8053d1b2181626233afb508bb4370fe1c8f2b7b9d6ad12f5f50975898310
def add_model(self, model_type: str, model_uuid: str, meta: dict, template_model: Template, update_default: bool=False): 'Add a new model to the registry. Call `upload()` to update the remote side.' if (update_default or (model_type not in self.meta)): self.meta[model_type] = meta['default'] model_meta = meta['model'] self.models.setdefault(model_type, {})[model_uuid] = model_meta model_directory = os.path.join(self.cached_repo, model_type) os.makedirs(model_directory, exist_ok=True) model = os.path.join(model_directory, (model_uuid + '.md')) if os.path.exists(model): os.remove(model) links = {} for (m_type, items) in self.models.items(): for uuid in items: if (uuid in model_meta['dependencies']): links[uuid] = os.path.join('/', m_type, ('%s.md' % uuid)) with open(model, 'w') as fout: fout.write(template_model.render(model_type=model_type, model_uuid=model_uuid, meta=model_meta, links=links, spdx=LICENSES)) git.add(self.cached_repo, [model]) self._log.info('Added %s', model)
Add a new model to the registry. Call `upload()` to update the remote side.
modelforge/index.py
add_model
src-d/modelforge
19
python
def add_model(self, model_type: str, model_uuid: str, meta: dict, template_model: Template, update_default: bool=False): if (update_default or (model_type not in self.meta)): self.meta[model_type] = meta['default'] model_meta = meta['model'] self.models.setdefault(model_type, {})[model_uuid] = model_meta model_directory = os.path.join(self.cached_repo, model_type) os.makedirs(model_directory, exist_ok=True) model = os.path.join(model_directory, (model_uuid + '.md')) if os.path.exists(model): os.remove(model) links = {} for (m_type, items) in self.models.items(): for uuid in items: if (uuid in model_meta['dependencies']): links[uuid] = os.path.join('/', m_type, ('%s.md' % uuid)) with open(model, 'w') as fout: fout.write(template_model.render(model_type=model_type, model_uuid=model_uuid, meta=model_meta, links=links, spdx=LICENSES)) git.add(self.cached_repo, [model]) self._log.info('Added %s', model)
def add_model(self, model_type: str, model_uuid: str, meta: dict, template_model: Template, update_default: bool=False): if (update_default or (model_type not in self.meta)): self.meta[model_type] = meta['default'] model_meta = meta['model'] self.models.setdefault(model_type, {})[model_uuid] = model_meta model_directory = os.path.join(self.cached_repo, model_type) os.makedirs(model_directory, exist_ok=True) model = os.path.join(model_directory, (model_uuid + '.md')) if os.path.exists(model): os.remove(model) links = {} for (m_type, items) in self.models.items(): for uuid in items: if (uuid in model_meta['dependencies']): links[uuid] = os.path.join('/', m_type, ('%s.md' % uuid)) with open(model, 'w') as fout: fout.write(template_model.render(model_type=model_type, model_uuid=model_uuid, meta=model_meta, links=links, spdx=LICENSES)) git.add(self.cached_repo, [model]) self._log.info('Added %s', model)<|docstring|>Add a new model to the registry. Call `upload()` to update the remote side.<|endoftext|>
6ed5bf3c11518cf2aca5582773a335d5d30405679447dbd12f40bd398fcd6c31
def update_readme(self, template_readme: Template): 'Generate the new README file locally.' readme = os.path.join(self.cached_repo, 'README.md') if os.path.exists(readme): os.remove(readme) links = {model_type: {} for model_type in self.models.keys()} for (model_type, model_uuids) in self.models.items(): for model_uuid in model_uuids: links[model_type][model_uuid] = os.path.join('/', model_type, ('%s.md' % model_uuid)) with open(readme, 'w') as fout: fout.write(template_readme.render(models=self.models, meta=self.meta, links=links)) git.add(self.cached_repo, [readme]) self._log.info('Updated %s', readme)
Generate the new README file locally.
modelforge/index.py
update_readme
src-d/modelforge
19
python
def update_readme(self, template_readme: Template): readme = os.path.join(self.cached_repo, 'README.md') if os.path.exists(readme): os.remove(readme) links = {model_type: {} for model_type in self.models.keys()} for (model_type, model_uuids) in self.models.items(): for model_uuid in model_uuids: links[model_type][model_uuid] = os.path.join('/', model_type, ('%s.md' % model_uuid)) with open(readme, 'w') as fout: fout.write(template_readme.render(models=self.models, meta=self.meta, links=links)) git.add(self.cached_repo, [readme]) self._log.info('Updated %s', readme)
def update_readme(self, template_readme: Template): readme = os.path.join(self.cached_repo, 'README.md') if os.path.exists(readme): os.remove(readme) links = {model_type: {} for model_type in self.models.keys()} for (model_type, model_uuids) in self.models.items(): for model_uuid in model_uuids: links[model_type][model_uuid] = os.path.join('/', model_type, ('%s.md' % model_uuid)) with open(readme, 'w') as fout: fout.write(template_readme.render(models=self.models, meta=self.meta, links=links)) git.add(self.cached_repo, [readme]) self._log.info('Updated %s', readme)<|docstring|>Generate the new README file locally.<|endoftext|>
11ccd5df706ab99c91eba5f010ec692c996191f874da1b8c7ca89db0b2eb06ac
def reset(self): 'Initialize the remote Git repository.' paths = [] for filename in os.listdir(self.cached_repo): if filename.startswith('.git'): continue path = os.path.join(self.cached_repo, filename) if os.path.isfile(path): paths.append(path) elif os.path.isdir(path): for model in os.listdir(path): paths.append(os.path.join(path, model)) git.remove(self.cached_repo, paths) self.contents = {'models': {}, 'meta': {}}
Initialize the remote Git repository.
modelforge/index.py
reset
src-d/modelforge
19
python
def reset(self): paths = [] for filename in os.listdir(self.cached_repo): if filename.startswith('.git'): continue path = os.path.join(self.cached_repo, filename) if os.path.isfile(path): paths.append(path) elif os.path.isdir(path): for model in os.listdir(path): paths.append(os.path.join(path, model)) git.remove(self.cached_repo, paths) self.contents = {'models': {}, 'meta': {}}
def reset(self): paths = [] for filename in os.listdir(self.cached_repo): if filename.startswith('.git'): continue path = os.path.join(self.cached_repo, filename) if os.path.isfile(path): paths.append(path) elif os.path.isdir(path): for model in os.listdir(path): paths.append(os.path.join(path, model)) git.remove(self.cached_repo, paths) self.contents = {'models': {}, 'meta': {}}<|docstring|>Initialize the remote Git repository.<|endoftext|>
574083ae4593f550fbd5a488d4b69e7c132fc9fc63a6da48dd56addcd63958bd
def upload(self, cmd: str, meta: dict): 'Push the current state of the registry to Git.' index = os.path.join(self.cached_repo, self.INDEX_FILE) if os.path.exists(index): os.remove(index) self._log.info('Writing the new index.json ...') with open(index, 'w') as _out: json.dump(self.contents, _out, sort_keys=True, indent=4) git.add(self.cached_repo, [index]) message = self.COMMIT_MESSAGES[cmd].format(**meta) if self.signoff: global_conf_path = os.path.expanduser('~/.gitconfig') if os.path.exists(global_conf_path): with open(global_conf_path, 'br') as _in: conf = ConfigFile.from_file(_in) try: name = conf.get(b'user', b'name').decode() email = conf.get(b'user', b'email').decode() message += self.DCO_MESSAGE.format(name=name, email=email) except KeyError: self._log.warning('Did not find name or email in %s, committing without DCO', global_conf_path) else: self._log.warning('Global git configuration file %s does not exist, committing without DCO', global_conf_path) else: self._log.info('Committing the index without DCO') git.commit(self.cached_repo, message=message) self._log.info('Pushing the updated index ...') git.push(self.cached_repo, self.remote_url, b'master') if self._are_local_and_remote_heads_different(): self._log.error('Push has failed') raise ValueError('Push has failed')
Push the current state of the registry to Git.
modelforge/index.py
upload
src-d/modelforge
19
python
def upload(self, cmd: str, meta: dict): index = os.path.join(self.cached_repo, self.INDEX_FILE) if os.path.exists(index): os.remove(index) self._log.info('Writing the new index.json ...') with open(index, 'w') as _out: json.dump(self.contents, _out, sort_keys=True, indent=4) git.add(self.cached_repo, [index]) message = self.COMMIT_MESSAGES[cmd].format(**meta) if self.signoff: global_conf_path = os.path.expanduser('~/.gitconfig') if os.path.exists(global_conf_path): with open(global_conf_path, 'br') as _in: conf = ConfigFile.from_file(_in) try: name = conf.get(b'user', b'name').decode() email = conf.get(b'user', b'email').decode() message += self.DCO_MESSAGE.format(name=name, email=email) except KeyError: self._log.warning('Did not find name or email in %s, committing without DCO', global_conf_path) else: self._log.warning('Global git configuration file %s does not exist, committing without DCO', global_conf_path) else: self._log.info('Committing the index without DCO') git.commit(self.cached_repo, message=message) self._log.info('Pushing the updated index ...') git.push(self.cached_repo, self.remote_url, b'master') if self._are_local_and_remote_heads_different(): self._log.error('Push has failed') raise ValueError('Push has failed')
def upload(self, cmd: str, meta: dict): index = os.path.join(self.cached_repo, self.INDEX_FILE) if os.path.exists(index): os.remove(index) self._log.info('Writing the new index.json ...') with open(index, 'w') as _out: json.dump(self.contents, _out, sort_keys=True, indent=4) git.add(self.cached_repo, [index]) message = self.COMMIT_MESSAGES[cmd].format(**meta) if self.signoff: global_conf_path = os.path.expanduser('~/.gitconfig') if os.path.exists(global_conf_path): with open(global_conf_path, 'br') as _in: conf = ConfigFile.from_file(_in) try: name = conf.get(b'user', b'name').decode() email = conf.get(b'user', b'email').decode() message += self.DCO_MESSAGE.format(name=name, email=email) except KeyError: self._log.warning('Did not find name or email in %s, committing without DCO', global_conf_path) else: self._log.warning('Global git configuration file %s does not exist, committing without DCO', global_conf_path) else: self._log.info('Committing the index without DCO') git.commit(self.cached_repo, message=message) self._log.info('Pushing the updated index ...') git.push(self.cached_repo, self.remote_url, b'master') if self._are_local_and_remote_heads_different(): self._log.error('Push has failed') raise ValueError('Push has failed')<|docstring|>Push the current state of the registry to Git.<|endoftext|>
c1e31a2dc2717ddbe26dd521f1bc7cee5db54c8bc89ffcc1398c201ca99ee67c
def load_template(self, template: str) -> Template: 'Load a Jinja2 template from the source directory.' env = dict(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=False) jinja2_ext = '.jinja2' if (not template.endswith(jinja2_ext)): self._log.error(('Template file name must end with %s' % jinja2_ext)) raise ValueError if (not template[:(- len(jinja2_ext))].endswith('.md')): self._log.error('Template file should be a Markdown file') raise ValueError if (not os.path.isabs(template)): template = os.path.join(os.path.dirname(__file__), template) with open(template, encoding='utf-8') as fin: template_obj = Template(fin.read(), **env) template_obj.filename = template self._log.info('Loaded %s', template) return template_obj
Load a Jinja2 template from the source directory.
modelforge/index.py
load_template
src-d/modelforge
19
python
def load_template(self, template: str) -> Template: env = dict(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=False) jinja2_ext = '.jinja2' if (not template.endswith(jinja2_ext)): self._log.error(('Template file name must end with %s' % jinja2_ext)) raise ValueError if (not template[:(- len(jinja2_ext))].endswith('.md')): self._log.error('Template file should be a Markdown file') raise ValueError if (not os.path.isabs(template)): template = os.path.join(os.path.dirname(__file__), template) with open(template, encoding='utf-8') as fin: template_obj = Template(fin.read(), **env) template_obj.filename = template self._log.info('Loaded %s', template) return template_obj
def load_template(self, template: str) -> Template: env = dict(trim_blocks=True, lstrip_blocks=True, keep_trailing_newline=False) jinja2_ext = '.jinja2' if (not template.endswith(jinja2_ext)): self._log.error(('Template file name must end with %s' % jinja2_ext)) raise ValueError if (not template[:(- len(jinja2_ext))].endswith('.md')): self._log.error('Template file should be a Markdown file') raise ValueError if (not os.path.isabs(template)): template = os.path.join(os.path.dirname(__file__), template) with open(template, encoding='utf-8') as fin: template_obj = Template(fin.read(), **env) template_obj.filename = template self._log.info('Loaded %s', template) return template_obj<|docstring|>Load a Jinja2 template from the source directory.<|endoftext|>
14b997842c93abcf6db655594fda8411ce5583e158d0a2f2c46157412139870f
def makeId(timestamp=0, machine=0, flow=0): '\n using unix style timestamp, not python timestamp\n ' timestamp -= _base return (((timestamp << 13) | (machine << 8)) | flow)
using unix style timestamp, not python timestamp
backend/app/snowflake.py
makeId
GJCav/thywy
8
python
def makeId(timestamp=0, machine=0, flow=0): '\n \n ' timestamp -= _base return (((timestamp << 13) | (machine << 8)) | flow)
def makeId(timestamp=0, machine=0, flow=0): '\n \n ' timestamp -= _base return (((timestamp << 13) | (machine << 8)) | flow)<|docstring|>using unix style timestamp, not python timestamp<|endoftext|>
88628d1185b0e200ee33c33a6be3c478dba7d523bc1e75f76234fed38b1b3195
def _preprocess(): '\n Before starting the jobs, first create the image files required for\n determining the chunk starting frequencies (i.e., "_tinyimg.sumwt"). If\n this file already exists it will move on.\n ' _get_config()
Before starting the jobs, first create the image files required for determining the chunk starting frequencies (i.e., "_tinyimg.sumwt"). If this file already exists it will move on.
pipe_scripts/run_pipe_cb68_cs.py
_preprocess
autocorr/faust_line_imaging
1
python
def _preprocess(): '\n Before starting the jobs, first create the image files required for\n determining the chunk starting frequencies (i.e., "_tinyimg.sumwt"). If\n this file already exists it will move on.\n ' _get_config()
def _preprocess(): '\n Before starting the jobs, first create the image files required for\n determining the chunk starting frequencies (i.e., "_tinyimg.sumwt"). If\n this file already exists it will move on.\n ' _get_config()<|docstring|>Before starting the jobs, first create the image files required for determining the chunk starting frequencies (i.e., "_tinyimg.sumwt"). If this file already exists it will move on.<|endoftext|>
ff041ba556cf36c348c435ee2f2aebb32674fe31b4e1b0542708ad80a83c3626
def _run_subset(batch_ix): '\n Run the pipeline for a subset of chunks. Chunks are processed in stride,\n e.g.: 0, 10, 20... or 1, 11, 21...\n\n Parameters\n ----------\n batch_ix : int\n Batch index number. For 100 chunks and 10 batches, ``batch_ix=0``\n would process chunks 0, 10, ..., 90.\n ' nbatches = _NBATCHES batch_ix = int(batch_ix) assert (batch_ix < nbatches) log_post(':: Running batch index: {0}'.format(batch_ix)) log_post('-- Batch: {0} / {1}'.format((batch_ix + 1), nbatches)) (full_config, chunked_configs) = _get_config() for config in chunked_configs[batch_ix::nbatches]: config.run_pipeline(ext=_RUN_EXT)
Run the pipeline for a subset of chunks. Chunks are processed in stride, e.g.: 0, 10, 20... or 1, 11, 21... Parameters ---------- batch_ix : int Batch index number. For 100 chunks and 10 batches, ``batch_ix=0`` would process chunks 0, 10, ..., 90.
pipe_scripts/run_pipe_cb68_cs.py
_run_subset
autocorr/faust_line_imaging
1
python
def _run_subset(batch_ix): '\n Run the pipeline for a subset of chunks. Chunks are processed in stride,\n e.g.: 0, 10, 20... or 1, 11, 21...\n\n Parameters\n ----------\n batch_ix : int\n Batch index number. For 100 chunks and 10 batches, ``batch_ix=0``\n would process chunks 0, 10, ..., 90.\n ' nbatches = _NBATCHES batch_ix = int(batch_ix) assert (batch_ix < nbatches) log_post(':: Running batch index: {0}'.format(batch_ix)) log_post('-- Batch: {0} / {1}'.format((batch_ix + 1), nbatches)) (full_config, chunked_configs) = _get_config() for config in chunked_configs[batch_ix::nbatches]: config.run_pipeline(ext=_RUN_EXT)
def _run_subset(batch_ix): '\n Run the pipeline for a subset of chunks. Chunks are processed in stride,\n e.g.: 0, 10, 20... or 1, 11, 21...\n\n Parameters\n ----------\n batch_ix : int\n Batch index number. For 100 chunks and 10 batches, ``batch_ix=0``\n would process chunks 0, 10, ..., 90.\n ' nbatches = _NBATCHES batch_ix = int(batch_ix) assert (batch_ix < nbatches) log_post(':: Running batch index: {0}'.format(batch_ix)) log_post('-- Batch: {0} / {1}'.format((batch_ix + 1), nbatches)) (full_config, chunked_configs) = _get_config() for config in chunked_configs[batch_ix::nbatches]: config.run_pipeline(ext=_RUN_EXT)<|docstring|>Run the pipeline for a subset of chunks. Chunks are processed in stride, e.g.: 0, 10, 20... or 1, 11, 21... Parameters ---------- batch_ix : int Batch index number. For 100 chunks and 10 batches, ``batch_ix=0`` would process chunks 0, 10, ..., 90.<|endoftext|>
aeced38063a2cb92e260b39da1c1b00f02ea6488ecafb8ccf6656fe09805ac3a
@property def template_url(self): "Template url for the stack resource.\n\n When stack resource is a TemplateResource, it's the template\n location. For group resources like ResourceGroup where the\n template is constructed dynamically, it's just a placeholder.\n " return 'nested_stack'
Template url for the stack resource. When stack resource is a TemplateResource, it's the template location. For group resources like ResourceGroup where the template is constructed dynamically, it's just a placeholder.
heat/engine/resources/stack_resource.py
template_url
coreycb/heat
1
python
@property def template_url(self): "Template url for the stack resource.\n\n When stack resource is a TemplateResource, it's the template\n location. For group resources like ResourceGroup where the\n template is constructed dynamically, it's just a placeholder.\n " return 'nested_stack'
@property def template_url(self): "Template url for the stack resource.\n\n When stack resource is a TemplateResource, it's the template\n location. For group resources like ResourceGroup where the\n template is constructed dynamically, it's just a placeholder.\n " return 'nested_stack'<|docstring|>Template url for the stack resource. When stack resource is a TemplateResource, it's the template location. For group resources like ResourceGroup where the template is constructed dynamically, it's just a placeholder.<|endoftext|>
9dfb63efc41610ec0b102e1d57dec02eed1d3891e007cd0bf01b1d6cbebfc6c5
def has_nested(self): 'Return True if the resource has an existing nested stack.' return ((self.resource_id is not None) or (self._nested is not None))
Return True if the resource has an existing nested stack.
heat/engine/resources/stack_resource.py
has_nested
coreycb/heat
1
python
def has_nested(self): return ((self.resource_id is not None) or (self._nested is not None))
def has_nested(self): return ((self.resource_id is not None) or (self._nested is not None))<|docstring|>Return True if the resource has an existing nested stack.<|endoftext|>
0b7d38aac587524f21bb32e5f3ec07392b2be0907199d44a1b89246d29c3e7e3
def nested(self): 'Return a Stack object representing the nested (child) stack.\n\n If we catch NotFound exception when loading, return None.\n ' if ((self._nested is None) and (self.resource_id is not None)): try: self._nested = parser.Stack.load(self.context, self.resource_id) except exception.NotFound: return None return self._nested
Return a Stack object representing the nested (child) stack. If we catch NotFound exception when loading, return None.
heat/engine/resources/stack_resource.py
nested
coreycb/heat
1
python
def nested(self): 'Return a Stack object representing the nested (child) stack.\n\n If we catch NotFound exception when loading, return None.\n ' if ((self._nested is None) and (self.resource_id is not None)): try: self._nested = parser.Stack.load(self.context, self.resource_id) except exception.NotFound: return None return self._nested
def nested(self): 'Return a Stack object representing the nested (child) stack.\n\n If we catch NotFound exception when loading, return None.\n ' if ((self._nested is None) and (self.resource_id is not None)): try: self._nested = parser.Stack.load(self.context, self.resource_id) except exception.NotFound: return None return self._nested<|docstring|>Return a Stack object representing the nested (child) stack. If we catch NotFound exception when loading, return None.<|endoftext|>
22bb2fba51818072343877d2c369947f35404831d4185c16fc949226331cc7ec
def child_template(self): 'Default implementation to get the child template.\n\n Resources that inherit from StackResource should override this method\n with specific details about the template used by them.\n ' raise NotImplementedError()
Default implementation to get the child template. Resources that inherit from StackResource should override this method with specific details about the template used by them.
heat/engine/resources/stack_resource.py
child_template
coreycb/heat
1
python
def child_template(self): 'Default implementation to get the child template.\n\n Resources that inherit from StackResource should override this method\n with specific details about the template used by them.\n ' raise NotImplementedError()
def child_template(self): 'Default implementation to get the child template.\n\n Resources that inherit from StackResource should override this method\n with specific details about the template used by them.\n ' raise NotImplementedError()<|docstring|>Default implementation to get the child template. Resources that inherit from StackResource should override this method with specific details about the template used by them.<|endoftext|>
7d23ec61561129d1356f2e0eb2ec9b9ed0f1c21009abc15ca2aefc15ed4188f0
def child_params(self): 'Default implementation to get the child params.\n\n Resources that inherit from StackResource should override this method\n with specific details about the parameters used by them.\n ' raise NotImplementedError()
Default implementation to get the child params. Resources that inherit from StackResource should override this method with specific details about the parameters used by them.
heat/engine/resources/stack_resource.py
child_params
coreycb/heat
1
python
def child_params(self): 'Default implementation to get the child params.\n\n Resources that inherit from StackResource should override this method\n with specific details about the parameters used by them.\n ' raise NotImplementedError()
def child_params(self): 'Default implementation to get the child params.\n\n Resources that inherit from StackResource should override this method\n with specific details about the parameters used by them.\n ' raise NotImplementedError()<|docstring|>Default implementation to get the child params. Resources that inherit from StackResource should override this method with specific details about the parameters used by them.<|endoftext|>
b71404ae2ffb5f9e3d095971518b0abf06b42d89b172564b23814699410dd46a
def preview(self): 'Preview a StackResource as resources within a Stack.\n\n This method overrides the original Resource.preview to return a preview\n of all the resources contained in this Stack. For this to be possible,\n the specific resources need to override both ``child_template`` and\n ``child_params`` with specific information to allow the stack to be\n parsed correctly. If any of these methods is missing, the entire\n StackResource will be returned as if it were a regular Resource.\n ' try: child_template = self.child_template() params = self.child_params() except NotImplementedError: class_name = reflection.get_class_name(self, fully_qualified=False) LOG.warning("Preview of '%s' not yet implemented", class_name) return self name = ('%s-%s' % (self.stack.name, self.name)) self._nested = self._parse_nested_stack(name, child_template, params) return self.nested().preview_resources()
Preview a StackResource as resources within a Stack. This method overrides the original Resource.preview to return a preview of all the resources contained in this Stack. For this to be possible, the specific resources need to override both ``child_template`` and ``child_params`` with specific information to allow the stack to be parsed correctly. If any of these methods is missing, the entire StackResource will be returned as if it were a regular Resource.
heat/engine/resources/stack_resource.py
preview
coreycb/heat
1
python
def preview(self): 'Preview a StackResource as resources within a Stack.\n\n This method overrides the original Resource.preview to return a preview\n of all the resources contained in this Stack. For this to be possible,\n the specific resources need to override both ``child_template`` and\n ``child_params`` with specific information to allow the stack to be\n parsed correctly. If any of these methods is missing, the entire\n StackResource will be returned as if it were a regular Resource.\n ' try: child_template = self.child_template() params = self.child_params() except NotImplementedError: class_name = reflection.get_class_name(self, fully_qualified=False) LOG.warning("Preview of '%s' not yet implemented", class_name) return self name = ('%s-%s' % (self.stack.name, self.name)) self._nested = self._parse_nested_stack(name, child_template, params) return self.nested().preview_resources()
def preview(self): 'Preview a StackResource as resources within a Stack.\n\n This method overrides the original Resource.preview to return a preview\n of all the resources contained in this Stack. For this to be possible,\n the specific resources need to override both ``child_template`` and\n ``child_params`` with specific information to allow the stack to be\n parsed correctly. If any of these methods is missing, the entire\n StackResource will be returned as if it were a regular Resource.\n ' try: child_template = self.child_template() params = self.child_params() except NotImplementedError: class_name = reflection.get_class_name(self, fully_qualified=False) LOG.warning("Preview of '%s' not yet implemented", class_name) return self name = ('%s-%s' % (self.stack.name, self.name)) self._nested = self._parse_nested_stack(name, child_template, params) return self.nested().preview_resources()<|docstring|>Preview a StackResource as resources within a Stack. This method overrides the original Resource.preview to return a preview of all the resources contained in this Stack. For this to be possible, the specific resources need to override both ``child_template`` and ``child_params`` with specific information to allow the stack to be parsed correctly. If any of these methods is missing, the entire StackResource will be returned as if it were a regular Resource.<|endoftext|>
c8c7f6512a78d1da077281442b84b609688c274ebff0b3148d6e1081892d21a5
def get_nested_parameters_stack(self): 'Return a stack for schema validation.\n\n This returns a stack to be introspected for building parameters schema.\n It can be customized by subclass to return a restricted version of what\n will be running.\n ' try: child_template = self.child_template() params = self.child_params() except NotImplementedError: class_name = reflection.get_class_name(self, fully_qualified=False) LOG.warning("Nested parameters of '%s' not yet implemented", class_name) return name = ('%s-%s' % (self.stack.name, self.name)) return self._parse_nested_stack(name, child_template, params)
Return a stack for schema validation. This returns a stack to be introspected for building parameters schema. It can be customized by subclass to return a restricted version of what will be running.
heat/engine/resources/stack_resource.py
get_nested_parameters_stack
coreycb/heat
1
python
def get_nested_parameters_stack(self): 'Return a stack for schema validation.\n\n This returns a stack to be introspected for building parameters schema.\n It can be customized by subclass to return a restricted version of what\n will be running.\n ' try: child_template = self.child_template() params = self.child_params() except NotImplementedError: class_name = reflection.get_class_name(self, fully_qualified=False) LOG.warning("Nested parameters of '%s' not yet implemented", class_name) return name = ('%s-%s' % (self.stack.name, self.name)) return self._parse_nested_stack(name, child_template, params)
def get_nested_parameters_stack(self): 'Return a stack for schema validation.\n\n This returns a stack to be introspected for building parameters schema.\n It can be customized by subclass to return a restricted version of what\n will be running.\n ' try: child_template = self.child_template() params = self.child_params() except NotImplementedError: class_name = reflection.get_class_name(self, fully_qualified=False) LOG.warning("Nested parameters of '%s' not yet implemented", class_name) return name = ('%s-%s' % (self.stack.name, self.name)) return self._parse_nested_stack(name, child_template, params)<|docstring|>Return a stack for schema validation. This returns a stack to be introspected for building parameters schema. It can be customized by subclass to return a restricted version of what will be running.<|endoftext|>
a9efad6367bb2f59b87a273912534a583ec49107ecf5a2c1e7b7de1c98de7101
def child_template_files(self, child_env): 'Default implementation to get the files map for child template.' return self.stack.t.files
Default implementation to get the files map for child template.
heat/engine/resources/stack_resource.py
child_template_files
coreycb/heat
1
python
def child_template_files(self, child_env): return self.stack.t.files
def child_template_files(self, child_env): return self.stack.t.files<|docstring|>Default implementation to get the files map for child template.<|endoftext|>
a8811583217060a51c324d60b129d71661bb1bcac0339a3b7c5769dafaaa6bd6
def create_with_template(self, child_template, user_params=None, timeout_mins=None, adopt_data=None): 'Create the nested stack with the given template.' name = self.physical_resource_name() if (timeout_mins is None): timeout_mins = self.stack.timeout_mins stack_user_project_id = self.stack.stack_user_project_id kwargs = self._stack_kwargs(user_params, child_template, adopt_data) adopt_data_str = None if (adopt_data is not None): if ('environment' not in adopt_data): adopt_data['environment'] = kwargs['params'] if ('template' not in adopt_data): if isinstance(child_template, template.Template): adopt_data['template'] = child_template.t else: adopt_data['template'] = child_template adopt_data_str = json.dumps(adopt_data) args = {rpc_api.PARAM_TIMEOUT: timeout_mins, rpc_api.PARAM_DISABLE_ROLLBACK: True, rpc_api.PARAM_ADOPT_STACK_DATA: adopt_data_str} kwargs.update({'stack_name': name, 'args': args, 'environment_files': None, 'owner_id': self.stack.id, 'user_creds_id': self.stack.user_creds_id, 'stack_user_project_id': stack_user_project_id, 'nested_depth': self._child_nested_depth(), 'parent_resource_name': self.name}) with self.translate_remote_exceptions: try: result = self.rpc_client()._create_stack(self.context, **kwargs) except exception.HeatException: with excutils.save_and_reraise_exception(): if (adopt_data is None): raw_template.RawTemplate.delete(self.context, kwargs['template_id']) self.resource_id_set(result['stack_id'])
Create the nested stack with the given template.
heat/engine/resources/stack_resource.py
create_with_template
coreycb/heat
1
python
def create_with_template(self, child_template, user_params=None, timeout_mins=None, adopt_data=None): name = self.physical_resource_name() if (timeout_mins is None): timeout_mins = self.stack.timeout_mins stack_user_project_id = self.stack.stack_user_project_id kwargs = self._stack_kwargs(user_params, child_template, adopt_data) adopt_data_str = None if (adopt_data is not None): if ('environment' not in adopt_data): adopt_data['environment'] = kwargs['params'] if ('template' not in adopt_data): if isinstance(child_template, template.Template): adopt_data['template'] = child_template.t else: adopt_data['template'] = child_template adopt_data_str = json.dumps(adopt_data) args = {rpc_api.PARAM_TIMEOUT: timeout_mins, rpc_api.PARAM_DISABLE_ROLLBACK: True, rpc_api.PARAM_ADOPT_STACK_DATA: adopt_data_str} kwargs.update({'stack_name': name, 'args': args, 'environment_files': None, 'owner_id': self.stack.id, 'user_creds_id': self.stack.user_creds_id, 'stack_user_project_id': stack_user_project_id, 'nested_depth': self._child_nested_depth(), 'parent_resource_name': self.name}) with self.translate_remote_exceptions: try: result = self.rpc_client()._create_stack(self.context, **kwargs) except exception.HeatException: with excutils.save_and_reraise_exception(): if (adopt_data is None): raw_template.RawTemplate.delete(self.context, kwargs['template_id']) self.resource_id_set(result['stack_id'])
def create_with_template(self, child_template, user_params=None, timeout_mins=None, adopt_data=None): name = self.physical_resource_name() if (timeout_mins is None): timeout_mins = self.stack.timeout_mins stack_user_project_id = self.stack.stack_user_project_id kwargs = self._stack_kwargs(user_params, child_template, adopt_data) adopt_data_str = None if (adopt_data is not None): if ('environment' not in adopt_data): adopt_data['environment'] = kwargs['params'] if ('template' not in adopt_data): if isinstance(child_template, template.Template): adopt_data['template'] = child_template.t else: adopt_data['template'] = child_template adopt_data_str = json.dumps(adopt_data) args = {rpc_api.PARAM_TIMEOUT: timeout_mins, rpc_api.PARAM_DISABLE_ROLLBACK: True, rpc_api.PARAM_ADOPT_STACK_DATA: adopt_data_str} kwargs.update({'stack_name': name, 'args': args, 'environment_files': None, 'owner_id': self.stack.id, 'user_creds_id': self.stack.user_creds_id, 'stack_user_project_id': stack_user_project_id, 'nested_depth': self._child_nested_depth(), 'parent_resource_name': self.name}) with self.translate_remote_exceptions: try: result = self.rpc_client()._create_stack(self.context, **kwargs) except exception.HeatException: with excutils.save_and_reraise_exception(): if (adopt_data is None): raw_template.RawTemplate.delete(self.context, kwargs['template_id']) self.resource_id_set(result['stack_id'])<|docstring|>Create the nested stack with the given template.<|endoftext|>
774b0b0bd109f97a82be53d5742367223dffd3aad95312ffdac62fdca4c0423b
def update_with_template(self, child_template, user_params=None, timeout_mins=None): 'Update the nested stack with the new template.' if (self.id is None): self.store() if (self.stack.action == self.stack.ROLLBACK): if self._try_rollback(): LOG.info('Triggered nested stack %s rollback', self.physical_resource_name()) return {'target_action': self.stack.ROLLBACK} if (self.resource_id is None): def _check_for_completion(): while (not self.check_create_complete()): (yield) empty_temp = template_format.parse("heat_template_version: '2013-05-23'") self.create_with_template(empty_temp, {}) checker = scheduler.TaskRunner(_check_for_completion) checker(timeout=self.stack.timeout_secs()) if (timeout_mins is None): timeout_mins = self.stack.timeout_mins try: status_data = stack_object.Stack.get_status(self.context, self.resource_id) except exception.NotFound: raise resource.UpdateReplace(self) (action, status, status_reason, updated_time) = status_data kwargs = self._stack_kwargs(user_params, child_template) cookie = {'previous': {'updated_at': updated_time, 'state': (action, status)}} kwargs.update({'stack_identity': dict(self.nested_identifier()), 'args': {rpc_api.PARAM_TIMEOUT: timeout_mins, rpc_api.PARAM_CONVERGE: self.converge}}) with self.translate_remote_exceptions: try: self.rpc_client()._update_stack(self.context, **kwargs) except exception.HeatException: with excutils.save_and_reraise_exception(): raw_template.RawTemplate.delete(self.context, kwargs['template_id']) return cookie
Update the nested stack with the new template.
heat/engine/resources/stack_resource.py
update_with_template
coreycb/heat
1
python
def update_with_template(self, child_template, user_params=None, timeout_mins=None): if (self.id is None): self.store() if (self.stack.action == self.stack.ROLLBACK): if self._try_rollback(): LOG.info('Triggered nested stack %s rollback', self.physical_resource_name()) return {'target_action': self.stack.ROLLBACK} if (self.resource_id is None): def _check_for_completion(): while (not self.check_create_complete()): (yield) empty_temp = template_format.parse("heat_template_version: '2013-05-23'") self.create_with_template(empty_temp, {}) checker = scheduler.TaskRunner(_check_for_completion) checker(timeout=self.stack.timeout_secs()) if (timeout_mins is None): timeout_mins = self.stack.timeout_mins try: status_data = stack_object.Stack.get_status(self.context, self.resource_id) except exception.NotFound: raise resource.UpdateReplace(self) (action, status, status_reason, updated_time) = status_data kwargs = self._stack_kwargs(user_params, child_template) cookie = {'previous': {'updated_at': updated_time, 'state': (action, status)}} kwargs.update({'stack_identity': dict(self.nested_identifier()), 'args': {rpc_api.PARAM_TIMEOUT: timeout_mins, rpc_api.PARAM_CONVERGE: self.converge}}) with self.translate_remote_exceptions: try: self.rpc_client()._update_stack(self.context, **kwargs) except exception.HeatException: with excutils.save_and_reraise_exception(): raw_template.RawTemplate.delete(self.context, kwargs['template_id']) return cookie
def update_with_template(self, child_template, user_params=None, timeout_mins=None): if (self.id is None): self.store() if (self.stack.action == self.stack.ROLLBACK): if self._try_rollback(): LOG.info('Triggered nested stack %s rollback', self.physical_resource_name()) return {'target_action': self.stack.ROLLBACK} if (self.resource_id is None): def _check_for_completion(): while (not self.check_create_complete()): (yield) empty_temp = template_format.parse("heat_template_version: '2013-05-23'") self.create_with_template(empty_temp, {}) checker = scheduler.TaskRunner(_check_for_completion) checker(timeout=self.stack.timeout_secs()) if (timeout_mins is None): timeout_mins = self.stack.timeout_mins try: status_data = stack_object.Stack.get_status(self.context, self.resource_id) except exception.NotFound: raise resource.UpdateReplace(self) (action, status, status_reason, updated_time) = status_data kwargs = self._stack_kwargs(user_params, child_template) cookie = {'previous': {'updated_at': updated_time, 'state': (action, status)}} kwargs.update({'stack_identity': dict(self.nested_identifier()), 'args': {rpc_api.PARAM_TIMEOUT: timeout_mins, rpc_api.PARAM_CONVERGE: self.converge}}) with self.translate_remote_exceptions: try: self.rpc_client()._update_stack(self.context, **kwargs) except exception.HeatException: with excutils.save_and_reraise_exception(): raw_template.RawTemplate.delete(self.context, kwargs['template_id']) return cookie<|docstring|>Update the nested stack with the new template.<|endoftext|>
86538e0634f5d35cc1467198b04ec80a92eab948cb0a5766571fdc3ab6c37e29
def delete_nested(self): 'Delete the nested stack.' stack_identity = self.nested_identifier() if (stack_identity is None): return with self.rpc_client().ignore_error_by_name('EntityNotFound'): if self.abandon_in_progress: self.rpc_client().abandon_stack(self.context, stack_identity) else: self.rpc_client().delete_stack(self.context, stack_identity, cast=False)
Delete the nested stack.
heat/engine/resources/stack_resource.py
delete_nested
coreycb/heat
1
python
def delete_nested(self): stack_identity = self.nested_identifier() if (stack_identity is None): return with self.rpc_client().ignore_error_by_name('EntityNotFound'): if self.abandon_in_progress: self.rpc_client().abandon_stack(self.context, stack_identity) else: self.rpc_client().delete_stack(self.context, stack_identity, cast=False)
def delete_nested(self): stack_identity = self.nested_identifier() if (stack_identity is None): return with self.rpc_client().ignore_error_by_name('EntityNotFound'): if self.abandon_in_progress: self.rpc_client().abandon_stack(self.context, stack_identity) else: self.rpc_client().delete_stack(self.context, stack_identity, cast=False)<|docstring|>Delete the nested stack.<|endoftext|>
a831ce64e4be732cde89438498dfddc79bdf516dc09e1db90552e00a2a2e5927
def get_output(self, op): 'Return the specified Output value from the nested stack.\n\n If the output key does not exist, raise a NotFound exception.\n ' if ((self._outputs is None) or ((op in self._outputs) and (rpc_api.OUTPUT_ERROR not in self._outputs[op]) and (self._outputs[op].get(rpc_api.OUTPUT_VALUE) is None))): stack_identity = self.nested_identifier() if (stack_identity is None): return stack = self.rpc_client().show_stack(self.context, dict(stack_identity)) if (not stack): return outputs = (stack[0].get(rpc_api.STACK_OUTPUTS) or {}) self._outputs = {o[rpc_api.OUTPUT_KEY]: o for o in outputs} if (op not in self._outputs): raise exception.NotFound((_('Specified output key %s not found.') % op)) output_data = self._outputs[op] if (rpc_api.OUTPUT_ERROR in output_data): raise exception.TemplateOutputError(resource=self.name, attribute=op, message=output_data[rpc_api.OUTPUT_ERROR]) return output_data[rpc_api.OUTPUT_VALUE]
Return the specified Output value from the nested stack. If the output key does not exist, raise a NotFound exception.
heat/engine/resources/stack_resource.py
get_output
coreycb/heat
1
python
def get_output(self, op): 'Return the specified Output value from the nested stack.\n\n If the output key does not exist, raise a NotFound exception.\n ' if ((self._outputs is None) or ((op in self._outputs) and (rpc_api.OUTPUT_ERROR not in self._outputs[op]) and (self._outputs[op].get(rpc_api.OUTPUT_VALUE) is None))): stack_identity = self.nested_identifier() if (stack_identity is None): return stack = self.rpc_client().show_stack(self.context, dict(stack_identity)) if (not stack): return outputs = (stack[0].get(rpc_api.STACK_OUTPUTS) or {}) self._outputs = {o[rpc_api.OUTPUT_KEY]: o for o in outputs} if (op not in self._outputs): raise exception.NotFound((_('Specified output key %s not found.') % op)) output_data = self._outputs[op] if (rpc_api.OUTPUT_ERROR in output_data): raise exception.TemplateOutputError(resource=self.name, attribute=op, message=output_data[rpc_api.OUTPUT_ERROR]) return output_data[rpc_api.OUTPUT_VALUE]
def get_output(self, op): 'Return the specified Output value from the nested stack.\n\n If the output key does not exist, raise a NotFound exception.\n ' if ((self._outputs is None) or ((op in self._outputs) and (rpc_api.OUTPUT_ERROR not in self._outputs[op]) and (self._outputs[op].get(rpc_api.OUTPUT_VALUE) is None))): stack_identity = self.nested_identifier() if (stack_identity is None): return stack = self.rpc_client().show_stack(self.context, dict(stack_identity)) if (not stack): return outputs = (stack[0].get(rpc_api.STACK_OUTPUTS) or {}) self._outputs = {o[rpc_api.OUTPUT_KEY]: o for o in outputs} if (op not in self._outputs): raise exception.NotFound((_('Specified output key %s not found.') % op)) output_data = self._outputs[op] if (rpc_api.OUTPUT_ERROR in output_data): raise exception.TemplateOutputError(resource=self.name, attribute=op, message=output_data[rpc_api.OUTPUT_ERROR]) return output_data[rpc_api.OUTPUT_VALUE]<|docstring|>Return the specified Output value from the nested stack. If the output key does not exist, raise a NotFound exception.<|endoftext|>
7c5b4f047c0d737663146976c350af35d6813bf3ee16c7293dde4b0a1f0850f5
def slow_tqdm(*args, **kwargs): ' Return a tqdm progress bar with infrequent updates. ' return tqdm.tqdm(*args, mininterval=10, **kwargs)
Return a tqdm progress bar with infrequent updates.
bioslds/run_hyper_snippets.py
slow_tqdm
ttesileanu/bio-time-series
0
python
def slow_tqdm(*args, **kwargs): ' ' return tqdm.tqdm(*args, mininterval=10, **kwargs)
def slow_tqdm(*args, **kwargs): ' ' return tqdm.tqdm(*args, mininterval=10, **kwargs)<|docstring|>Return a tqdm progress bar with infrequent updates.<|endoftext|>
13b023ed342f57aac69aa9b9a05d5b2846a61fc61d35e7955da0fb6dc411f42b
def make_bio_wta_with_stable_initial(*args, **kwargs) -> BioWTARegressor: ' Call the BioWTARegressor constructor, ensuring that the initial coefficients are\n chosen to correspond to stable AR processes.\n ' weights = [make_random_arma(kwargs['n_features'], 0, rng=kwargs['rng']).a for _ in range(kwargs['n_models'])] return BioWTARegressor(*args, weights=weights, **kwargs)
Call the BioWTARegressor constructor, ensuring that the initial coefficients are chosen to correspond to stable AR processes.
bioslds/run_hyper_snippets.py
make_bio_wta_with_stable_initial
ttesileanu/bio-time-series
0
python
def make_bio_wta_with_stable_initial(*args, **kwargs) -> BioWTARegressor: ' Call the BioWTARegressor constructor, ensuring that the initial coefficients are\n chosen to correspond to stable AR processes.\n ' weights = [make_random_arma(kwargs['n_features'], 0, rng=kwargs['rng']).a for _ in range(kwargs['n_models'])] return BioWTARegressor(*args, weights=weights, **kwargs)
def make_bio_wta_with_stable_initial(*args, **kwargs) -> BioWTARegressor: ' Call the BioWTARegressor constructor, ensuring that the initial coefficients are\n chosen to correspond to stable AR processes.\n ' weights = [make_random_arma(kwargs['n_features'], 0, rng=kwargs['rng']).a for _ in range(kwargs['n_models'])] return BioWTARegressor(*args, weights=weights, **kwargs)<|docstring|>Call the BioWTARegressor constructor, ensuring that the initial coefficients are chosen to correspond to stable AR processes.<|endoftext|>
15744881e855beb7a67a404fd2ddf33199afec0703fea5c08df2c6c1f1030cb9
def get_metadata(metapath): ' Returns the metadata as a pandas dataframe, translating strings\n to simpler boolean flags.\n ' meta = pd.read_csv(metapath, index_col='docid', dtype='object') return meta
Returns the metadata as a pandas dataframe, translating strings to simpler boolean flags.
code/implementpagemodel.py
get_metadata
tedunderwood/hathimetadata
4
python
def get_metadata(metapath): ' Returns the metadata as a pandas dataframe, translating strings\n to simpler boolean flags.\n ' meta = pd.read_csv(metapath, index_col='docid', dtype='object') return meta
def get_metadata(metapath): ' Returns the metadata as a pandas dataframe, translating strings\n to simpler boolean flags.\n ' meta = pd.read_csv(metapath, index_col='docid', dtype='object') return meta<|docstring|>Returns the metadata as a pandas dataframe, translating strings to simpler boolean flags.<|endoftext|>
bdd18c834c4e12351ba4695952eaef70a039c85d6a7f516bf8a39343ced13256
def get_counts_4pages(path, docid): ' Gets a dictionary of wordcounts.\n\n Adjusted to handle page instances.\n Same logic used in trainapagemodel, but\n simplified for one volume.\n ' volume = parser.PagelistFromJson(path, docid) pagecounts = volume.get_feature_list() error = 'success' counts = dict() pageids = [] for (idx, page) in enumerate(pagecounts): pageid = ((docid + '||') + str(idx)) pageids.append(pageid) counts[pageid] = page return (counts, pageids, error)
Gets a dictionary of wordcounts. Adjusted to handle page instances. Same logic used in trainapagemodel, but simplified for one volume.
code/implementpagemodel.py
get_counts_4pages
tedunderwood/hathimetadata
4
python
def get_counts_4pages(path, docid): ' Gets a dictionary of wordcounts.\n\n Adjusted to handle page instances.\n Same logic used in trainapagemodel, but\n simplified for one volume.\n ' volume = parser.PagelistFromJson(path, docid) pagecounts = volume.get_feature_list() error = 'success' counts = dict() pageids = [] for (idx, page) in enumerate(pagecounts): pageid = ((docid + '||') + str(idx)) pageids.append(pageid) counts[pageid] = page return (counts, pageids, error)
def get_counts_4pages(path, docid): ' Gets a dictionary of wordcounts.\n\n Adjusted to handle page instances.\n Same logic used in trainapagemodel, but\n simplified for one volume.\n ' volume = parser.PagelistFromJson(path, docid) pagecounts = volume.get_feature_list() error = 'success' counts = dict() pageids = [] for (idx, page) in enumerate(pagecounts): pageid = ((docid + '||') + str(idx)) pageids.append(pageid) counts[pageid] = page return (counts, pageids, error)<|docstring|>Gets a dictionary of wordcounts. Adjusted to handle page instances. Same logic used in trainapagemodel, but simplified for one volume.<|endoftext|>
e01a676ab2c28274d2af93e759c07383de6ecebceafcc325618506a93d36275b
def predict_volume(model, allpageIDs, counts, docid): ' what it says on the label; returns a dictionary\n with page-level predictions for the volume; this will\n eventually be written out in json format\n ' vocabulary = model['vocabulary'] df = pages2frame(vocabulary, allpageIDs, counts) pagepredictions = prediction_for_pages(model, df) (firstpage, lastpage) = trimends(meansmooth(pagepredictions)) jsonobject = dict() jsonobject['docid'] = docid jsonobject['numpages'] = len(pagepredictions) jsonobject['pagepredictions'] = pagepredictions jsonobject['firstpage'] = firstpage jsonobject['lastpage'] = lastpage return jsonobject
what it says on the label; returns a dictionary with page-level predictions for the volume; this will eventually be written out in json format
code/implementpagemodel.py
predict_volume
tedunderwood/hathimetadata
4
python
def predict_volume(model, allpageIDs, counts, docid): ' what it says on the label; returns a dictionary\n with page-level predictions for the volume; this will\n eventually be written out in json format\n ' vocabulary = model['vocabulary'] df = pages2frame(vocabulary, allpageIDs, counts) pagepredictions = prediction_for_pages(model, df) (firstpage, lastpage) = trimends(meansmooth(pagepredictions)) jsonobject = dict() jsonobject['docid'] = docid jsonobject['numpages'] = len(pagepredictions) jsonobject['pagepredictions'] = pagepredictions jsonobject['firstpage'] = firstpage jsonobject['lastpage'] = lastpage return jsonobject
def predict_volume(model, allpageIDs, counts, docid): ' what it says on the label; returns a dictionary\n with page-level predictions for the volume; this will\n eventually be written out in json format\n ' vocabulary = model['vocabulary'] df = pages2frame(vocabulary, allpageIDs, counts) pagepredictions = prediction_for_pages(model, df) (firstpage, lastpage) = trimends(meansmooth(pagepredictions)) jsonobject = dict() jsonobject['docid'] = docid jsonobject['numpages'] = len(pagepredictions) jsonobject['pagepredictions'] = pagepredictions jsonobject['firstpage'] = firstpage jsonobject['lastpage'] = lastpage return jsonobject<|docstring|>what it says on the label; returns a dictionary with page-level predictions for the volume; this will eventually be written out in json format<|endoftext|>
49132e789cdf3a47593e4023c30f2aadca44761784fa190a85768e9ee6dc8cfa
def pages2frame(vocabulary, allpageIDs, counts): ' Returns a pandas dataframe with feature counts for all the volumes\n to be used in this model. The dataframe is going to have an extra column\n that is used to group items for crossvalidation. E.g., if instances are pages,\n they might be grouped by volume ID for crossvalidating to avoid leaking info.\n If these are volumes, that might be the author ID.\n\n We expect positive and negative IDs to be the actual IDs of instances.\n\n Returns an unscaled data frame. Scaling is a separate step.\n ' df = dict() vocabset = set(vocabulary) for v in vocabulary: df[v] = pd.Series(np.zeros(len(allpageIDs)), index=allpageIDs) for pageid in allpageIDs: for (feature, count) in counts[pageid].items(): if (feature in vocabset): df[feature].loc[pageid] = count df = pd.DataFrame(df, index=allpageIDs) df = df[vocabulary] return df
Returns a pandas dataframe with feature counts for all the volumes to be used in this model. The dataframe is going to have an extra column that is used to group items for crossvalidation. E.g., if instances are pages, they might be grouped by volume ID for crossvalidating to avoid leaking info. If these are volumes, that might be the author ID. We expect positive and negative IDs to be the actual IDs of instances. Returns an unscaled data frame. Scaling is a separate step.
code/implementpagemodel.py
pages2frame
tedunderwood/hathimetadata
4
python
def pages2frame(vocabulary, allpageIDs, counts): ' Returns a pandas dataframe with feature counts for all the volumes\n to be used in this model. The dataframe is going to have an extra column\n that is used to group items for crossvalidation. E.g., if instances are pages,\n they might be grouped by volume ID for crossvalidating to avoid leaking info.\n If these are volumes, that might be the author ID.\n\n We expect positive and negative IDs to be the actual IDs of instances.\n\n Returns an unscaled data frame. Scaling is a separate step.\n ' df = dict() vocabset = set(vocabulary) for v in vocabulary: df[v] = pd.Series(np.zeros(len(allpageIDs)), index=allpageIDs) for pageid in allpageIDs: for (feature, count) in counts[pageid].items(): if (feature in vocabset): df[feature].loc[pageid] = count df = pd.DataFrame(df, index=allpageIDs) df = df[vocabulary] return df
def pages2frame(vocabulary, allpageIDs, counts): ' Returns a pandas dataframe with feature counts for all the volumes\n to be used in this model. The dataframe is going to have an extra column\n that is used to group items for crossvalidation. E.g., if instances are pages,\n they might be grouped by volume ID for crossvalidating to avoid leaking info.\n If these are volumes, that might be the author ID.\n\n We expect positive and negative IDs to be the actual IDs of instances.\n\n Returns an unscaled data frame. Scaling is a separate step.\n ' df = dict() vocabset = set(vocabulary) for v in vocabulary: df[v] = pd.Series(np.zeros(len(allpageIDs)), index=allpageIDs) for pageid in allpageIDs: for (feature, count) in counts[pageid].items(): if (feature in vocabset): df[feature].loc[pageid] = count df = pd.DataFrame(df, index=allpageIDs) df = df[vocabulary] return df<|docstring|>Returns a pandas dataframe with feature counts for all the volumes to be used in this model. The dataframe is going to have an extra column that is used to group items for crossvalidation. E.g., if instances are pages, they might be grouped by volume ID for crossvalidating to avoid leaking info. If these are volumes, that might be the author ID. We expect positive and negative IDs to be the actual IDs of instances. Returns an unscaled data frame. Scaling is a separate step.<|endoftext|>
8663c16488686101b3f527a013f957462dc6f3aa0fa57266afe0c74391f004c3
def trimends(inputseries): '\n Returns the first page and last page considered\n to belong to the specified genre. Note that this\n is normal world "first" and "last," not the fracked-up\n programming-world definition of ranges where "last"\n is the place you stop, aka last+1.\n ' binsequence = list() for element in inputseries: if (float(element) > 0.5): binsequence.append(1) else: binsequence.append(0) assert (len(binsequence) == len(inputseries)) if (len(binsequence) < 5): return (0, (len(binsequence) - 1)) newseq = ([1] * len(binsequence)) newseq[0] = binsequence[0] newseq[(- 1)] = binsequence[(- 1)] firstpage = 0 lastpage = (len(binsequence) - 1) if (binsequence[0] != 1): for i in range(1, (len(binsequence) - 1)): total = sum(binsequence[(i - 1):(i + 2)]) if (total < 2): newseq[i] = 0 else: newseq[i] = 1 firstpage = i break else: firstpage = 0 if (binsequence[(len(binsequence) - 1)] != 1): for i in range((len(binsequence) - 2), (- 1), (- 1)): total = sum(binsequence[(i - 1):(i + 2)]) if (total < 2): newseq[i] = 0 else: newseq[i] = 1 lastpage = i break else: lastpage = (len(binsequence) - 1) return (firstpage, lastpage)
Returns the first page and last page considered to belong to the specified genre. Note that this is normal world "first" and "last," not the fracked-up programming-world definition of ranges where "last" is the place you stop, aka last+1.
code/implementpagemodel.py
trimends
tedunderwood/hathimetadata
4
python
def trimends(inputseries): '\n Returns the first page and last page considered\n to belong to the specified genre. Note that this\n is normal world "first" and "last," not the fracked-up\n programming-world definition of ranges where "last"\n is the place you stop, aka last+1.\n ' binsequence = list() for element in inputseries: if (float(element) > 0.5): binsequence.append(1) else: binsequence.append(0) assert (len(binsequence) == len(inputseries)) if (len(binsequence) < 5): return (0, (len(binsequence) - 1)) newseq = ([1] * len(binsequence)) newseq[0] = binsequence[0] newseq[(- 1)] = binsequence[(- 1)] firstpage = 0 lastpage = (len(binsequence) - 1) if (binsequence[0] != 1): for i in range(1, (len(binsequence) - 1)): total = sum(binsequence[(i - 1):(i + 2)]) if (total < 2): newseq[i] = 0 else: newseq[i] = 1 firstpage = i break else: firstpage = 0 if (binsequence[(len(binsequence) - 1)] != 1): for i in range((len(binsequence) - 2), (- 1), (- 1)): total = sum(binsequence[(i - 1):(i + 2)]) if (total < 2): newseq[i] = 0 else: newseq[i] = 1 lastpage = i break else: lastpage = (len(binsequence) - 1) return (firstpage, lastpage)
def trimends(inputseries): '\n Returns the first page and last page considered\n to belong to the specified genre. Note that this\n is normal world "first" and "last," not the fracked-up\n programming-world definition of ranges where "last"\n is the place you stop, aka last+1.\n ' binsequence = list() for element in inputseries: if (float(element) > 0.5): binsequence.append(1) else: binsequence.append(0) assert (len(binsequence) == len(inputseries)) if (len(binsequence) < 5): return (0, (len(binsequence) - 1)) newseq = ([1] * len(binsequence)) newseq[0] = binsequence[0] newseq[(- 1)] = binsequence[(- 1)] firstpage = 0 lastpage = (len(binsequence) - 1) if (binsequence[0] != 1): for i in range(1, (len(binsequence) - 1)): total = sum(binsequence[(i - 1):(i + 2)]) if (total < 2): newseq[i] = 0 else: newseq[i] = 1 firstpage = i break else: firstpage = 0 if (binsequence[(len(binsequence) - 1)] != 1): for i in range((len(binsequence) - 2), (- 1), (- 1)): total = sum(binsequence[(i - 1):(i + 2)]) if (total < 2): newseq[i] = 0 else: newseq[i] = 1 lastpage = i break else: lastpage = (len(binsequence) - 1) return (firstpage, lastpage)<|docstring|>Returns the first page and last page considered to belong to the specified genre. Note that this is normal world "first" and "last," not the fracked-up programming-world definition of ranges where "last" is the place you stop, aka last+1.<|endoftext|>
246316bb078f9c4fef6b88b07889104e98136890f6c8ea5c0acbda934062b63f
def main(sourcedirs, metapath, modeldir, outpath, pairtree=False): '\n This function can be called from outside the module; it accepts\n path information and then iterates through all the files it\n finds in the metadata at "metapath."\n\n If the pairtree flag is True, we assume sourcedir is the root\n of a pairtree structure. Otherwise we assume it\'s a flat list.\n ' models = [] modelpaths = glob.glob((modeldir + '*.p')) assert (len(modelpaths) == 1) model = loadamodel(modelpaths[0]) metadata = get_metadata(metapath) notfound = dict() c = 0 path = '' for docid in metadata.index: print(c) c += 1 if pairtree: found = False for sourcedir in sourcedirs: path = get_pairtree(sourcedir, docid) if os.path.isfile(path): found = True chosenpath = path if (not found): print(path) print('file not found') error = 'file not found' wordcount = 0 else: (pagecounts, pageids, error) = get_counts_4pages(chosenpath, docid) else: path = os.path.join(sourcedir, (utils.clean_pairtree(docid) + '.csv')) (pagecounts, pageids, error) = pagecounts4file(path) if (error == 'success'): volumejson = predict_volume(model, pageids, pagecounts, docid) volumestring = json.dumps(volumejson) with open(outpath, mode='a', encoding='utf-8') as f: f.write((volumestring + '\n')) print(docid) else: notfound[docid] = error print(docid, error) with open('fictionpagesnotfound.txt', mode='a', encoding='utf-8') as f: for (vol, reason) in notfound.items(): f.write((((vol + '\t') + reason) + '\n'))
This function can be called from outside the module; it accepts path information and then iterates through all the files it finds in the metadata at "metapath." If the pairtree flag is True, we assume sourcedir is the root of a pairtree structure. Otherwise we assume it's a flat list.
code/implementpagemodel.py
main
tedunderwood/hathimetadata
4
python
def main(sourcedirs, metapath, modeldir, outpath, pairtree=False): '\n This function can be called from outside the module; it accepts\n path information and then iterates through all the files it\n finds in the metadata at "metapath."\n\n If the pairtree flag is True, we assume sourcedir is the root\n of a pairtree structure. Otherwise we assume it\'s a flat list.\n ' models = [] modelpaths = glob.glob((modeldir + '*.p')) assert (len(modelpaths) == 1) model = loadamodel(modelpaths[0]) metadata = get_metadata(metapath) notfound = dict() c = 0 path = for docid in metadata.index: print(c) c += 1 if pairtree: found = False for sourcedir in sourcedirs: path = get_pairtree(sourcedir, docid) if os.path.isfile(path): found = True chosenpath = path if (not found): print(path) print('file not found') error = 'file not found' wordcount = 0 else: (pagecounts, pageids, error) = get_counts_4pages(chosenpath, docid) else: path = os.path.join(sourcedir, (utils.clean_pairtree(docid) + '.csv')) (pagecounts, pageids, error) = pagecounts4file(path) if (error == 'success'): volumejson = predict_volume(model, pageids, pagecounts, docid) volumestring = json.dumps(volumejson) with open(outpath, mode='a', encoding='utf-8') as f: f.write((volumestring + '\n')) print(docid) else: notfound[docid] = error print(docid, error) with open('fictionpagesnotfound.txt', mode='a', encoding='utf-8') as f: for (vol, reason) in notfound.items(): f.write((((vol + '\t') + reason) + '\n'))
def main(sourcedirs, metapath, modeldir, outpath, pairtree=False): '\n This function can be called from outside the module; it accepts\n path information and then iterates through all the files it\n finds in the metadata at "metapath."\n\n If the pairtree flag is True, we assume sourcedir is the root\n of a pairtree structure. Otherwise we assume it\'s a flat list.\n ' models = [] modelpaths = glob.glob((modeldir + '*.p')) assert (len(modelpaths) == 1) model = loadamodel(modelpaths[0]) metadata = get_metadata(metapath) notfound = dict() c = 0 path = for docid in metadata.index: print(c) c += 1 if pairtree: found = False for sourcedir in sourcedirs: path = get_pairtree(sourcedir, docid) if os.path.isfile(path): found = True chosenpath = path if (not found): print(path) print('file not found') error = 'file not found' wordcount = 0 else: (pagecounts, pageids, error) = get_counts_4pages(chosenpath, docid) else: path = os.path.join(sourcedir, (utils.clean_pairtree(docid) + '.csv')) (pagecounts, pageids, error) = pagecounts4file(path) if (error == 'success'): volumejson = predict_volume(model, pageids, pagecounts, docid) volumestring = json.dumps(volumejson) with open(outpath, mode='a', encoding='utf-8') as f: f.write((volumestring + '\n')) print(docid) else: notfound[docid] = error print(docid, error) with open('fictionpagesnotfound.txt', mode='a', encoding='utf-8') as f: for (vol, reason) in notfound.items(): f.write((((vol + '\t') + reason) + '\n'))<|docstring|>This function can be called from outside the module; it accepts path information and then iterates through all the files it finds in the metadata at "metapath." If the pairtree flag is True, we assume sourcedir is the root of a pairtree structure. Otherwise we assume it's a flat list.<|endoftext|>
fdcb7e24c5578079743450e71033f72fca24b0ecf665a8cc717f7e80bdf4b7ba
def measure_velocities_from_timetraces(dataset_name, save, noshow=False): '\n maximise Sum_i(Envelope(TimeTrace[tof_backwall_i]))\n ' conf = arim.io.load_conf(dataset_name) root_dir = conf['root_dir'] result_dir = conf['result_dir'] frame = common.load_frame(conf, apply_filter=True, expand=True, warn_if_fallback_vel=False) frame.scanlines = np.abs(frame.scanlines) base_l_vel = ((conf['block_material']['longitudinal_vel'] // 10) * 10) l_vel_range_1 = np.arange((base_l_vel - 100), (base_l_vel + 100.1), 10.0) intensities_1 = _measure_l_vel(conf, frame, l_vel_range_1) l_vel_1_idx = intensities_1.values.argmax() if ((l_vel_1_idx == 0) or (l_vel_1_idx == (len(l_vel_range_1) - 1))): raise IndefiniteVelocityError l_vel_range_2 = np.arange((l_vel_range_1[(l_vel_1_idx - 1)] + 1), l_vel_range_1[(l_vel_1_idx + 1)], 1.0) intensities_2 = _measure_l_vel(conf, frame, l_vel_range_2) intensities = pd.concat([intensities_1, intensities_2]).sort_index() l_vel_opt = intensities.idxmax() logger.info(f'Optimal L velocitiy: {l_vel_opt} m/s') conf['block_material']['longitudinal_vel'] = l_vel_opt plt.figure() plt.plot(intensities.index, intensities, '.-') plt.xlabel('L velocitiy (m/s)') plt.ylabel('Backwall LL intensity') plt.title(f'Optimum: {l_vel_opt}') if save: plt.savefig((result_dir / 'velocity_L')) base_t_vel = ((conf['block_material']['transverse_vel'] // 10) * 10) t_vel_range_1 = np.arange((base_t_vel - 100), (base_t_vel + 100.1), 10.0) intensities_1 = _measure_t_vel(conf, frame, t_vel_range_1) t_vel_1_idx = intensities_1.values.argmax() if ((t_vel_1_idx == 0) or (t_vel_1_idx == (len(t_vel_range_1) - 1))): raise IndefiniteVelocityError t_vel_range_2 = np.arange((t_vel_range_1[(t_vel_1_idx - 1)] + 1), t_vel_range_1[(t_vel_1_idx + 1)], 1.0) intensities_2 = _measure_t_vel(conf, frame, t_vel_range_2) intensities = pd.concat([intensities_1, intensities_2]).sort_index() t_vel_opt = intensities.idxmax() logger.info(f'Optimal T velocitiy: {t_vel_opt} m/s') 
conf['block_material']['transverse_vel'] = t_vel_opt plt.figure() plt.plot(intensities.index, intensities, '.-') plt.xlabel('T velocitiy (m/s)') plt.ylabel('Backwall LT intensity') plt.title(f'Optimum: {t_vel_opt}') if save: plt.savefig((result_dir / 'velocity_T')) if save: block_conf = dict(longitudinal_vel=float(l_vel_opt), transverse_vel=float(t_vel_opt), metadata=dict(source='Velocities measured from TFM', is_fallback=False)) block_conf2 = dict(block_material=block_conf) with (root_dir / 'conf.d/30_block_velocities.yaml').open('w') as f: f.write('# generated by measure_velocities_from_timetraces.py\n') yaml.dump(block_conf2, f, default_flow_style=False) if noshow: plt.close('all') else: plt.show() return (l_vel_opt, t_vel_opt)
maximise Sum_i(Envelope(TimeTrace[tof_backwall_i]))
arimtoolkit/measure_velocities_from_timetraces.py
measure_velocities_from_timetraces
nbud/arimtoolkit
0
python
def measure_velocities_from_timetraces(dataset_name, save, noshow=False): '\n \n ' conf = arim.io.load_conf(dataset_name) root_dir = conf['root_dir'] result_dir = conf['result_dir'] frame = common.load_frame(conf, apply_filter=True, expand=True, warn_if_fallback_vel=False) frame.scanlines = np.abs(frame.scanlines) base_l_vel = ((conf['block_material']['longitudinal_vel'] // 10) * 10) l_vel_range_1 = np.arange((base_l_vel - 100), (base_l_vel + 100.1), 10.0) intensities_1 = _measure_l_vel(conf, frame, l_vel_range_1) l_vel_1_idx = intensities_1.values.argmax() if ((l_vel_1_idx == 0) or (l_vel_1_idx == (len(l_vel_range_1) - 1))): raise IndefiniteVelocityError l_vel_range_2 = np.arange((l_vel_range_1[(l_vel_1_idx - 1)] + 1), l_vel_range_1[(l_vel_1_idx + 1)], 1.0) intensities_2 = _measure_l_vel(conf, frame, l_vel_range_2) intensities = pd.concat([intensities_1, intensities_2]).sort_index() l_vel_opt = intensities.idxmax() logger.info(f'Optimal L velocitiy: {l_vel_opt} m/s') conf['block_material']['longitudinal_vel'] = l_vel_opt plt.figure() plt.plot(intensities.index, intensities, '.-') plt.xlabel('L velocitiy (m/s)') plt.ylabel('Backwall LL intensity') plt.title(f'Optimum: {l_vel_opt}') if save: plt.savefig((result_dir / 'velocity_L')) base_t_vel = ((conf['block_material']['transverse_vel'] // 10) * 10) t_vel_range_1 = np.arange((base_t_vel - 100), (base_t_vel + 100.1), 10.0) intensities_1 = _measure_t_vel(conf, frame, t_vel_range_1) t_vel_1_idx = intensities_1.values.argmax() if ((t_vel_1_idx == 0) or (t_vel_1_idx == (len(t_vel_range_1) - 1))): raise IndefiniteVelocityError t_vel_range_2 = np.arange((t_vel_range_1[(t_vel_1_idx - 1)] + 1), t_vel_range_1[(t_vel_1_idx + 1)], 1.0) intensities_2 = _measure_t_vel(conf, frame, t_vel_range_2) intensities = pd.concat([intensities_1, intensities_2]).sort_index() t_vel_opt = intensities.idxmax() logger.info(f'Optimal T velocitiy: {t_vel_opt} m/s') conf['block_material']['transverse_vel'] = t_vel_opt plt.figure() 
plt.plot(intensities.index, intensities, '.-') plt.xlabel('T velocitiy (m/s)') plt.ylabel('Backwall LT intensity') plt.title(f'Optimum: {t_vel_opt}') if save: plt.savefig((result_dir / 'velocity_T')) if save: block_conf = dict(longitudinal_vel=float(l_vel_opt), transverse_vel=float(t_vel_opt), metadata=dict(source='Velocities measured from TFM', is_fallback=False)) block_conf2 = dict(block_material=block_conf) with (root_dir / 'conf.d/30_block_velocities.yaml').open('w') as f: f.write('# generated by measure_velocities_from_timetraces.py\n') yaml.dump(block_conf2, f, default_flow_style=False) if noshow: plt.close('all') else: plt.show() return (l_vel_opt, t_vel_opt)
def measure_velocities_from_timetraces(dataset_name, save, noshow=False): '\n \n ' conf = arim.io.load_conf(dataset_name) root_dir = conf['root_dir'] result_dir = conf['result_dir'] frame = common.load_frame(conf, apply_filter=True, expand=True, warn_if_fallback_vel=False) frame.scanlines = np.abs(frame.scanlines) base_l_vel = ((conf['block_material']['longitudinal_vel'] // 10) * 10) l_vel_range_1 = np.arange((base_l_vel - 100), (base_l_vel + 100.1), 10.0) intensities_1 = _measure_l_vel(conf, frame, l_vel_range_1) l_vel_1_idx = intensities_1.values.argmax() if ((l_vel_1_idx == 0) or (l_vel_1_idx == (len(l_vel_range_1) - 1))): raise IndefiniteVelocityError l_vel_range_2 = np.arange((l_vel_range_1[(l_vel_1_idx - 1)] + 1), l_vel_range_1[(l_vel_1_idx + 1)], 1.0) intensities_2 = _measure_l_vel(conf, frame, l_vel_range_2) intensities = pd.concat([intensities_1, intensities_2]).sort_index() l_vel_opt = intensities.idxmax() logger.info(f'Optimal L velocitiy: {l_vel_opt} m/s') conf['block_material']['longitudinal_vel'] = l_vel_opt plt.figure() plt.plot(intensities.index, intensities, '.-') plt.xlabel('L velocitiy (m/s)') plt.ylabel('Backwall LL intensity') plt.title(f'Optimum: {l_vel_opt}') if save: plt.savefig((result_dir / 'velocity_L')) base_t_vel = ((conf['block_material']['transverse_vel'] // 10) * 10) t_vel_range_1 = np.arange((base_t_vel - 100), (base_t_vel + 100.1), 10.0) intensities_1 = _measure_t_vel(conf, frame, t_vel_range_1) t_vel_1_idx = intensities_1.values.argmax() if ((t_vel_1_idx == 0) or (t_vel_1_idx == (len(t_vel_range_1) - 1))): raise IndefiniteVelocityError t_vel_range_2 = np.arange((t_vel_range_1[(t_vel_1_idx - 1)] + 1), t_vel_range_1[(t_vel_1_idx + 1)], 1.0) intensities_2 = _measure_t_vel(conf, frame, t_vel_range_2) intensities = pd.concat([intensities_1, intensities_2]).sort_index() t_vel_opt = intensities.idxmax() logger.info(f'Optimal T velocitiy: {t_vel_opt} m/s') conf['block_material']['transverse_vel'] = t_vel_opt plt.figure() 
plt.plot(intensities.index, intensities, '.-') plt.xlabel('T velocitiy (m/s)') plt.ylabel('Backwall LT intensity') plt.title(f'Optimum: {t_vel_opt}') if save: plt.savefig((result_dir / 'velocity_T')) if save: block_conf = dict(longitudinal_vel=float(l_vel_opt), transverse_vel=float(t_vel_opt), metadata=dict(source='Velocities measured from TFM', is_fallback=False)) block_conf2 = dict(block_material=block_conf) with (root_dir / 'conf.d/30_block_velocities.yaml').open('w') as f: f.write('# generated by measure_velocities_from_timetraces.py\n') yaml.dump(block_conf2, f, default_flow_style=False) if noshow: plt.close('all') else: plt.show() return (l_vel_opt, t_vel_opt)<|docstring|>maximise Sum_i(Envelope(TimeTrace[tof_backwall_i]))<|endoftext|>
23282078951d659bf1c1e93b5f46b9eb51ad76ae1b614620dd5c256a4dce74b9
def add_node_to_graph(self, graph, nodename, nodelabel=None, shape='box', color=None, url=None, tooltip=None): '\n Create a node item for this factory, adds it to the graph.\n\n Node name can vary from label but must always be same for the same node label\n ' if ((nodename is None) or (nodename == '')): raise ValueError('Empty Node name') if (nodelabel is None): nodelabel = nodename node = pydot.Node(self.escape_name(nodename)) node.set_shape(shape) node.set_label(self.escape_label(nodelabel)) if (tooltip is not None): node.set_tooltip(tooltip) elif (url is not None): node.set_tooltip(url) if (url is not None): node.set_URL(self.escape_name(url)) if (color is not None): node.set_color(color) graph.add_node(node)
Create a node item for this factory, adds it to the graph. Node name can vary from label but must always be same for the same node label
melodic/src/qt_gui_core/qt_dotgraph/src/qt_dotgraph/pydotfactory.py
add_node_to_graph
disorn-inc/ROS-melodic-python3-Opencv-4.1.1-CUDA
2
python
def add_node_to_graph(self, graph, nodename, nodelabel=None, shape='box', color=None, url=None, tooltip=None): '\n Create a node item for this factory, adds it to the graph.\n\n Node name can vary from label but must always be same for the same node label\n ' if ((nodename is None) or (nodename == )): raise ValueError('Empty Node name') if (nodelabel is None): nodelabel = nodename node = pydot.Node(self.escape_name(nodename)) node.set_shape(shape) node.set_label(self.escape_label(nodelabel)) if (tooltip is not None): node.set_tooltip(tooltip) elif (url is not None): node.set_tooltip(url) if (url is not None): node.set_URL(self.escape_name(url)) if (color is not None): node.set_color(color) graph.add_node(node)
def add_node_to_graph(self, graph, nodename, nodelabel=None, shape='box', color=None, url=None, tooltip=None): '\n Create a node item for this factory, adds it to the graph.\n\n Node name can vary from label but must always be same for the same node label\n ' if ((nodename is None) or (nodename == )): raise ValueError('Empty Node name') if (nodelabel is None): nodelabel = nodename node = pydot.Node(self.escape_name(nodename)) node.set_shape(shape) node.set_label(self.escape_label(nodelabel)) if (tooltip is not None): node.set_tooltip(tooltip) elif (url is not None): node.set_tooltip(url) if (url is not None): node.set_URL(self.escape_name(url)) if (color is not None): node.set_color(color) graph.add_node(node)<|docstring|>Create a node item for this factory, adds it to the graph. Node name can vary from label but must always be same for the same node label<|endoftext|>
24290a22af54be60e086c2db09a3bd90a25f2e7c5da45500fa43ab4cf664a3c7
def add_subgraph_to_graph(self, graph, subgraphname, rank='same', simplify=True, rankdir='TB', ranksep=0.2, compound=True, color=None, shape='box', style='bold', subgraphlabel=None): '\n Create a cluster subgraph item for this factory, adds it to the graph.\n\n cluster name can vary from label but must always be same for the same node label.\n Most layouters require cluster names to start with cluster.\n ' if ((subgraphname is None) or (subgraphname == '')): raise ValueError('Empty subgraph name') g = pydot.Cluster(self.escape_name(subgraphname), rank=rank, rankdir=rankdir, simplify=simplify) if ('set_style' in g.__dict__): g.set_style(style) if ('set_shape' in g.__dict__): g.set_shape(shape) if (LooseVersion(pydot.__version__) > LooseVersion('1.0.10')): g.set_compound(compound) g.set_ranksep(ranksep) subgraphlabel = (subgraphname if (subgraphlabel is None) else subgraphlabel) subgraphlabel = self.escape_label(subgraphlabel) if subgraphlabel: g.set_label(subgraphlabel) if ('set_color' in g.__dict__): if (color is not None): g.set_color(color) graph.add_subgraph(g) return g
Create a cluster subgraph item for this factory, adds it to the graph. cluster name can vary from label but must always be same for the same node label. Most layouters require cluster names to start with cluster.
melodic/src/qt_gui_core/qt_dotgraph/src/qt_dotgraph/pydotfactory.py
add_subgraph_to_graph
disorn-inc/ROS-melodic-python3-Opencv-4.1.1-CUDA
2
python
def add_subgraph_to_graph(self, graph, subgraphname, rank='same', simplify=True, rankdir='TB', ranksep=0.2, compound=True, color=None, shape='box', style='bold', subgraphlabel=None): '\n Create a cluster subgraph item for this factory, adds it to the graph.\n\n cluster name can vary from label but must always be same for the same node label.\n Most layouters require cluster names to start with cluster.\n ' if ((subgraphname is None) or (subgraphname == )): raise ValueError('Empty subgraph name') g = pydot.Cluster(self.escape_name(subgraphname), rank=rank, rankdir=rankdir, simplify=simplify) if ('set_style' in g.__dict__): g.set_style(style) if ('set_shape' in g.__dict__): g.set_shape(shape) if (LooseVersion(pydot.__version__) > LooseVersion('1.0.10')): g.set_compound(compound) g.set_ranksep(ranksep) subgraphlabel = (subgraphname if (subgraphlabel is None) else subgraphlabel) subgraphlabel = self.escape_label(subgraphlabel) if subgraphlabel: g.set_label(subgraphlabel) if ('set_color' in g.__dict__): if (color is not None): g.set_color(color) graph.add_subgraph(g) return g
def add_subgraph_to_graph(self, graph, subgraphname, rank='same', simplify=True, rankdir='TB', ranksep=0.2, compound=True, color=None, shape='box', style='bold', subgraphlabel=None): '\n Create a cluster subgraph item for this factory, adds it to the graph.\n\n cluster name can vary from label but must always be same for the same node label.\n Most layouters require cluster names to start with cluster.\n ' if ((subgraphname is None) or (subgraphname == )): raise ValueError('Empty subgraph name') g = pydot.Cluster(self.escape_name(subgraphname), rank=rank, rankdir=rankdir, simplify=simplify) if ('set_style' in g.__dict__): g.set_style(style) if ('set_shape' in g.__dict__): g.set_shape(shape) if (LooseVersion(pydot.__version__) > LooseVersion('1.0.10')): g.set_compound(compound) g.set_ranksep(ranksep) subgraphlabel = (subgraphname if (subgraphlabel is None) else subgraphlabel) subgraphlabel = self.escape_label(subgraphlabel) if subgraphlabel: g.set_label(subgraphlabel) if ('set_color' in g.__dict__): if (color is not None): g.set_color(color) graph.add_subgraph(g) return g<|docstring|>Create a cluster subgraph item for this factory, adds it to the graph. cluster name can vary from label but must always be same for the same node label. Most layouters require cluster names to start with cluster.<|endoftext|>
bc685d0b9e508e45698278c64b81162b2d81d725db15674281e0f98a6b970ed3
def alter_field(self, model, old_field, new_field, strict=False): '\n Vertica do not allow alter column type if it is used in constraints such as UNIQUE.\n In order to all work, the constraint is dropped, column altered, constraint recreated.\n ' curr = self.connection.cursor() result = curr.execute(("\n select cc.constraint_name\n from constraint_columns cc\n where 1=1\n and cc.table_name = '%s'\n and cc.constraint_type = 'u'\n and cc.column_name = '%s'" % (model._meta.db_table, new_field.column))).fetchone() if result: constraint_name = result[0] drop_statement = self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name) self.execute(drop_statement) super().alter_field(model, old_field, new_field, strict) create_statement = self._create_unique_sql(model, [new_field.column], constraint_name) self.execute(create_statement) else: super().alter_field(model, old_field, new_field, strict)
Vertica do not allow alter column type if it is used in constraints such as UNIQUE. In order to all work, the constraint is dropped, column altered, constraint recreated.
vertica/schema.py
alter_field
emushell/django_vertica_backend
0
python
def alter_field(self, model, old_field, new_field, strict=False): '\n Vertica do not allow alter column type if it is used in constraints such as UNIQUE.\n In order to all work, the constraint is dropped, column altered, constraint recreated.\n ' curr = self.connection.cursor() result = curr.execute(("\n select cc.constraint_name\n from constraint_columns cc\n where 1=1\n and cc.table_name = '%s'\n and cc.constraint_type = 'u'\n and cc.column_name = '%s'" % (model._meta.db_table, new_field.column))).fetchone() if result: constraint_name = result[0] drop_statement = self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name) self.execute(drop_statement) super().alter_field(model, old_field, new_field, strict) create_statement = self._create_unique_sql(model, [new_field.column], constraint_name) self.execute(create_statement) else: super().alter_field(model, old_field, new_field, strict)
def alter_field(self, model, old_field, new_field, strict=False): '\n Vertica do not allow alter column type if it is used in constraints such as UNIQUE.\n In order to all work, the constraint is dropped, column altered, constraint recreated.\n ' curr = self.connection.cursor() result = curr.execute(("\n select cc.constraint_name\n from constraint_columns cc\n where 1=1\n and cc.table_name = '%s'\n and cc.constraint_type = 'u'\n and cc.column_name = '%s'" % (model._meta.db_table, new_field.column))).fetchone() if result: constraint_name = result[0] drop_statement = self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name) self.execute(drop_statement) super().alter_field(model, old_field, new_field, strict) create_statement = self._create_unique_sql(model, [new_field.column], constraint_name) self.execute(create_statement) else: super().alter_field(model, old_field, new_field, strict)<|docstring|>Vertica do not allow alter column type if it is used in constraints such as UNIQUE. In order to all work, the constraint is dropped, column altered, constraint recreated.<|endoftext|>
f62340220d3dacd7a22c7d528794233fe7f6890e839d7022352545c54d5e7859
def column_sql(self, model, field, include_default=False): '\n Take a field and return its column definition.\n The field must already have had set_attributes_from_name() called.\n ' db_params = field.db_parameters(connection=self.connection) sql = db_params['type'] params = [] if (sql is None): return (None, None) null = field.null include_default = (include_default and (not self.skip_default(field))) if include_default: default_value = self.effective_default(field) if (default_value is not None): if self.connection.features.requires_literal_defaults: sql += (' DEFAULT %s' % self.prepare_default(default_value)) else: sql += ' DEFAULT %s' params += [default_value] if (field.empty_strings_allowed and (not field.primary_key) and self.connection.features.interprets_empty_strings_as_nulls): null = True if (null and (not self.connection.features.implied_column_null)): sql += self.custom_sql_column_null elif (not null): sql += self.custom_sql_column_not_null if field.primary_key: sql += self.custom_sql_column_pk elif field.unique: sql += self.custom_sql_column_unique return (sql, params)
Take a field and return its column definition. The field must already have had set_attributes_from_name() called.
vertica/schema.py
column_sql
emushell/django_vertica_backend
0
python
def column_sql(self, model, field, include_default=False): '\n Take a field and return its column definition.\n The field must already have had set_attributes_from_name() called.\n ' db_params = field.db_parameters(connection=self.connection) sql = db_params['type'] params = [] if (sql is None): return (None, None) null = field.null include_default = (include_default and (not self.skip_default(field))) if include_default: default_value = self.effective_default(field) if (default_value is not None): if self.connection.features.requires_literal_defaults: sql += (' DEFAULT %s' % self.prepare_default(default_value)) else: sql += ' DEFAULT %s' params += [default_value] if (field.empty_strings_allowed and (not field.primary_key) and self.connection.features.interprets_empty_strings_as_nulls): null = True if (null and (not self.connection.features.implied_column_null)): sql += self.custom_sql_column_null elif (not null): sql += self.custom_sql_column_not_null if field.primary_key: sql += self.custom_sql_column_pk elif field.unique: sql += self.custom_sql_column_unique return (sql, params)
def column_sql(self, model, field, include_default=False): '\n Take a field and return its column definition.\n The field must already have had set_attributes_from_name() called.\n ' db_params = field.db_parameters(connection=self.connection) sql = db_params['type'] params = [] if (sql is None): return (None, None) null = field.null include_default = (include_default and (not self.skip_default(field))) if include_default: default_value = self.effective_default(field) if (default_value is not None): if self.connection.features.requires_literal_defaults: sql += (' DEFAULT %s' % self.prepare_default(default_value)) else: sql += ' DEFAULT %s' params += [default_value] if (field.empty_strings_allowed and (not field.primary_key) and self.connection.features.interprets_empty_strings_as_nulls): null = True if (null and (not self.connection.features.implied_column_null)): sql += self.custom_sql_column_null elif (not null): sql += self.custom_sql_column_not_null if field.primary_key: sql += self.custom_sql_column_pk elif field.unique: sql += self.custom_sql_column_unique return (sql, params)<|docstring|>Take a field and return its column definition. The field must already have had set_attributes_from_name() called.<|endoftext|>
3eef918e18994ed3e3971a1018e6404e578b7722492ec0943702c397e7352f0f
def _model_indexes_sql(self, model): "\n Vertica dose not support INDEX's.\n Skip all statements which are related to index creation or manipulation.\n " return []
Vertica dose not support INDEX's. Skip all statements which are related to index creation or manipulation.
vertica/schema.py
_model_indexes_sql
emushell/django_vertica_backend
0
python
def _model_indexes_sql(self, model): "\n Vertica dose not support INDEX's.\n Skip all statements which are related to index creation or manipulation.\n " return []
def _model_indexes_sql(self, model): "\n Vertica dose not support INDEX's.\n Skip all statements which are related to index creation or manipulation.\n " return []<|docstring|>Vertica dose not support INDEX's. Skip all statements which are related to index creation or manipulation.<|endoftext|>
3e38ac3bc9318b86b285ec1c0161b8916ac801892c55a6fc36a883f91b2a1c9c
def setUp(self): '\n Setup method that is called at the beginning of each test.\n ' (self.documents, self.users) = (18, 10) (documents_cnt, users_cnt) = (self.documents, self.users) self.n_iterations = 15 self.k_folds = 3 self.hyperparameters = {'n_factors': 5, '_lambda': 0.01} self.options = {'n_iterations': self.n_iterations, 'k_folds': self.k_folds} self.initializer = ModelInitializer(self.hyperparameters.copy(), self.n_iterations) self.n_recommendations = 1 def mock_get_ratings_matrix(self=None): return [[int((not bool(((article + user) % 3)))) for article in range(documents_cnt)] for user in range(users_cnt)] self.ratings_matrix = numpy.array(mock_get_ratings_matrix()) setattr(DataParser, 'get_ratings_matrix', mock_get_ratings_matrix) self.evaluator = Evaluator(self.ratings_matrix) self.cf = CollaborativeFiltering(self.initializer, self.evaluator, self.hyperparameters, self.options, load_matrices=True) self.cf.train() self.cf.evaluator.k_folds = self.k_folds self.test_data = self.cf.test_data self.predictions = self.cf.get_predictions() self.rounded_predictions = self.cf.rounded_predictions()
Setup method that is called at the beginning of each test.
tests/evaluator_tests.py
setUp
mostafa-mahmoud/HyPRec
5
python
def setUp(self): '\n \n ' (self.documents, self.users) = (18, 10) (documents_cnt, users_cnt) = (self.documents, self.users) self.n_iterations = 15 self.k_folds = 3 self.hyperparameters = {'n_factors': 5, '_lambda': 0.01} self.options = {'n_iterations': self.n_iterations, 'k_folds': self.k_folds} self.initializer = ModelInitializer(self.hyperparameters.copy(), self.n_iterations) self.n_recommendations = 1 def mock_get_ratings_matrix(self=None): return [[int((not bool(((article + user) % 3)))) for article in range(documents_cnt)] for user in range(users_cnt)] self.ratings_matrix = numpy.array(mock_get_ratings_matrix()) setattr(DataParser, 'get_ratings_matrix', mock_get_ratings_matrix) self.evaluator = Evaluator(self.ratings_matrix) self.cf = CollaborativeFiltering(self.initializer, self.evaluator, self.hyperparameters, self.options, load_matrices=True) self.cf.train() self.cf.evaluator.k_folds = self.k_folds self.test_data = self.cf.test_data self.predictions = self.cf.get_predictions() self.rounded_predictions = self.cf.rounded_predictions()
def setUp(self): '\n \n ' (self.documents, self.users) = (18, 10) (documents_cnt, users_cnt) = (self.documents, self.users) self.n_iterations = 15 self.k_folds = 3 self.hyperparameters = {'n_factors': 5, '_lambda': 0.01} self.options = {'n_iterations': self.n_iterations, 'k_folds': self.k_folds} self.initializer = ModelInitializer(self.hyperparameters.copy(), self.n_iterations) self.n_recommendations = 1 def mock_get_ratings_matrix(self=None): return [[int((not bool(((article + user) % 3)))) for article in range(documents_cnt)] for user in range(users_cnt)] self.ratings_matrix = numpy.array(mock_get_ratings_matrix()) setattr(DataParser, 'get_ratings_matrix', mock_get_ratings_matrix) self.evaluator = Evaluator(self.ratings_matrix) self.cf = CollaborativeFiltering(self.initializer, self.evaluator, self.hyperparameters, self.options, load_matrices=True) self.cf.train() self.cf.evaluator.k_folds = self.k_folds self.test_data = self.cf.test_data self.predictions = self.cf.get_predictions() self.rounded_predictions = self.cf.rounded_predictions()<|docstring|>Setup method that is called at the beginning of each test.<|endoftext|>
fc7e1f47a518a5881efe16bcdea02a2a4a2ac9281444a87052cf7ea5bf131943
def moon_phase_code_to_name(code, lang='en'): 'Converts moon phase code to name.' return moon_phase_names[lang][code]
Converts moon phase code to name.
generate.py
moon_phase_code_to_name
PanderMusubi/lunar-phase-calendar
9
python
def moon_phase_code_to_name(code, lang='en'): return moon_phase_names[lang][code]
def moon_phase_code_to_name(code, lang='en'): return moon_phase_names[lang][code]<|docstring|>Converts moon phase code to name.<|endoftext|>
1370b37c5ee5ccaa15aa043bcfba47a66c95ad86d69fa88b86a67a83ac3587cc
def moon_phase_code_to_symbol(code): 'Converts moon phase code to symbol.' return moon_phase_symbols[code]
Converts moon phase code to symbol.
generate.py
moon_phase_code_to_symbol
PanderMusubi/lunar-phase-calendar
9
python
def moon_phase_code_to_symbol(code): return moon_phase_symbols[code]
def moon_phase_code_to_symbol(code): return moon_phase_symbols[code]<|docstring|>Converts moon phase code to symbol.<|endoftext|>
5a0e6b366cfc42a45532a6412f9acf0d4b298977cc8d28ac14fef32de08beea2
def moon_phase_to_inacurate_code(phase): 'Converts moon phase code to inacurate code.' phase = int(phase) value = None if (phase == 0): value = 0 elif (0 < phase < 7): value = 1 elif (phase == 7): value = 2 elif (7 < phase < 14): value = 3 elif (phase == 14): value = 4 elif (14 < phase < 21): value = 5 elif (phase == 21): value = 6 else: value = 7 return value
Converts moon phase code to inacurate code.
generate.py
moon_phase_to_inacurate_code
PanderMusubi/lunar-phase-calendar
9
python
def moon_phase_to_inacurate_code(phase): phase = int(phase) value = None if (phase == 0): value = 0 elif (0 < phase < 7): value = 1 elif (phase == 7): value = 2 elif (7 < phase < 14): value = 3 elif (phase == 14): value = 4 elif (14 < phase < 21): value = 5 elif (phase == 21): value = 6 else: value = 7 return value
def moon_phase_to_inacurate_code(phase): phase = int(phase) value = None if (phase == 0): value = 0 elif (0 < phase < 7): value = 1 elif (phase == 7): value = 2 elif (7 < phase < 14): value = 3 elif (phase == 14): value = 4 elif (14 < phase < 21): value = 5 elif (phase == 21): value = 6 else: value = 7 return value<|docstring|>Converts moon phase code to inacurate code.<|endoftext|>
6173927af6d220edfe877f74949a23fa70fa0d7b8383af53f4b2fb3289d0a0d5
def day_to_moon_phase_and_accurate_code(day): 'Converts day to moon phase and accurate code.' phase_today = moon.phase(day) code_today = moon_phase_to_inacurate_code(phase_today) if ((code_today % 2) != 0): return (phase_today, code_today) phase_yesterday = moon.phase((day - timedelta(days=1))) code_yesterday = moon_phase_to_inacurate_code(phase_yesterday) if (code_today == code_yesterday): return (phase_today, (code_today + 1)) return (phase_today, code_today)
Converts day to moon phase and accurate code.
generate.py
day_to_moon_phase_and_accurate_code
PanderMusubi/lunar-phase-calendar
9
python
def day_to_moon_phase_and_accurate_code(day): phase_today = moon.phase(day) code_today = moon_phase_to_inacurate_code(phase_today) if ((code_today % 2) != 0): return (phase_today, code_today) phase_yesterday = moon.phase((day - timedelta(days=1))) code_yesterday = moon_phase_to_inacurate_code(phase_yesterday) if (code_today == code_yesterday): return (phase_today, (code_today + 1)) return (phase_today, code_today)
def day_to_moon_phase_and_accurate_code(day): phase_today = moon.phase(day) code_today = moon_phase_to_inacurate_code(phase_today) if ((code_today % 2) != 0): return (phase_today, code_today) phase_yesterday = moon.phase((day - timedelta(days=1))) code_yesterday = moon_phase_to_inacurate_code(phase_yesterday) if (code_today == code_yesterday): return (phase_today, (code_today + 1)) return (phase_today, code_today)<|docstring|>Converts day to moon phase and accurate code.<|endoftext|>
d738f6327a1bd0f3a23f694835449d68346b4dd9ea1792c082e6aab5bdbd071f
def write_files(lang='en'): 'Writes calendar files.' utcnow = datetime.utcnow() dtstamp = utcnow.strftime('%Y%m%dT%H%M%SZ') uid_format = 'UID:%(date)s-%(pid)d-%(seq)04d-%(lang)s@%(domain)s\n' uid_replace_values = {'date': dtstamp, 'pid': getpid(), 'domain': getfqdn()} event_seq = 1 tsv = open('moon-phases.tsv', 'w') tsv_new = open('new-moon.tsv', 'w') tsv_full = open('full-moon.tsv', 'w') tsv_all = open('moon-phases-all.tsv', 'w') mkd = open('moon-phases.md', 'w') mkd_new = open('new-moon.md', 'w') mkd_full = open('full-moon.md', 'w') mkd_all = open('moon-phases-all.md', 'w') ics = open('moon-phases.ics', 'w', newline='\r\n') ics_new = open('new-moon.ics', 'w', newline='\r\n') ics_full = open('full-moon.ics', 'w', newline='\r\n') tsv_header = '# {}\t# {}\t# {}\t# {}\n'.format(header[lang][0].ljust(10), header[lang][1], header[lang][2], header[lang][3]) tsv_header_short = '# {}\t# {}\n'.format(header[lang][0], header[lang][1]) tsv.write(tsv_header) tsv_all.write(tsv_header) tsv_new.write(tsv_header_short) tsv_full.write(tsv_header_short) title = header[lang][4] if (lang in titles): title = title.title() mkd_header = '# {}\n\n{} | {} | {} | {}\n-----------|-------:|---|---\n'.format(title, header[lang][0].ljust(10), header[lang][1].ljust(6), header[lang][2], header[lang][3]) title = moon_phase_names[lang][0] if (lang in titles): title = title.title() mkd_header_new = '# {}\n\n{} | {}\n-----------|------:\n'.format(title, header[lang][0].ljust(10), header[lang][1]) title = moon_phase_names[lang][4] if (lang in titles): title = title.title() mkd_header_full = '# {}\n\n{} | {}\n-----------|------:\n'.format(title, header[lang][0].ljust(10), header[lang][1]) mkd.write(mkd_header) mkd_all.write(mkd_header) mkd_new.write(mkd_header_new) mkd_full.write(mkd_header_full) calendar_header = open('../templates/calendar-header-{}.txt'.format(lang)) for line in calendar_header: if (lang in titles): ics.write(line.replace('Lunar Phase', header[lang][4].title())) 
ics_new.write(line.replace('Lunar Phase', moon_phase_names[lang][0].title())) ics_full.write(line.replace('Lunar Phase', moon_phase_names[lang][4].title())) else: ics.write(line.replace('Lunar Phase', header[lang][4])) ics_new.write(line.replace('Lunar Phase', moon_phase_names[lang][0])) ics_full.write(line.replace('Lunar Phase', moon_phase_names[lang][4])) event_header = '' for line in open('../templates/event-header.txt'): event_header += line.replace('DTSTAMP:', 'DTSTAMP:{}'.format(dtstamp)) event_footer = '' for line in open('../templates/event-footer.txt'): event_footer += line today = date.today() start = (today - timedelta(days=(31 + 1))) end = (today + timedelta(days=((2 * 366) + (2 * 31)))) for i in range((end - start).days): day = (start + timedelta(days=i)) (phase, code) = day_to_moon_phase_and_accurate_code(day) symbol = moon_phase_code_to_symbol(code) name = moon_phase_code_to_name(code, lang) tsv_all.write('{}\t{:6.3f}\t{}\t{}\n'.format(day, phase, symbol, name)) mkd_all.write('{} | {:6.3f} | {} | {}\n'.format(day, phase, symbol, name)) if ((code % 2) == 0): tsv.write('{}\t{:6.3f}\t{}\t{}\n'.format(day, phase, symbol, name)) mkd.write('{} | {:6.3f} | {} | {}\n'.format(day, phase, symbol, name)) ics.write('{}{} {}\n'.format(event_header.strip(), symbol, name)) ics.write((uid_format % dict((list(uid_replace_values.items()) + list({'lang': 'nl', 'seq': event_seq}.items()))))) event_seq += 1 ics_start = '{}'.format(day) ics_end = '{}'.format((day + timedelta(days=1))) ics.write('DTSTART;VALUE=DATE:{}\n'.format(ics_start.replace('-', ''))) ics.write('DTEND;VALUE=DATE:{}\n'.format(ics_end.replace('-', ''))) ics.write(event_footer) if (code == 0): tsv_new.write('{}\t{:6.3f}\n'.format(day, phase)) mkd_new.write('{} | {:6.3f}\n'.format(day, phase)) ics_new.write('{}{} {}\n'.format(event_header.strip(), symbol, name)) ics_new.write((uid_format % dict((list(uid_replace_values.items()) + list({'lang': 'nl', 'seq': event_seq}.items()))))) event_seq += 1 ics_start 
= '{}'.format(day) ics_end = '{}'.format((day + timedelta(days=1))) ics_new.write('DTSTART;VALUE=DATE:{}\n'.format(ics_start.replace('-', ''))) ics_new.write('DTEND;VALUE=DATE:{}\n'.format(ics_end.replace('-', ''))) ics_new.write(event_footer) if (code == 4): tsv_full.write('{}\t{:6.3f}\n'.format(day, phase)) mkd_full.write('{} | {:6.3f}\n'.format(day, phase)) ics_full.write('{}{} {}\n'.format(event_header.strip(), symbol, name)) ics_full.write((uid_format % dict((list(uid_replace_values.items()) + list({'lang': 'nl', 'seq': event_seq}.items()))))) event_seq += 1 ics_start = '{}'.format(day) ics_end = '{}'.format((day + timedelta(days=1))) ics_full.write('DTSTART;VALUE=DATE:{}\n'.format(ics_start.replace('-', ''))) ics_full.write('DTEND;VALUE=DATE:{}\n'.format(ics_end.replace('-', ''))) ics_full.write(event_footer) calendar_footer = open('../templates/calendar-footer.txt') for line in calendar_footer: ics.write(line) ics_new.write(line) ics_full.write(line)
Writes calendar files.
generate.py
write_files
PanderMusubi/lunar-phase-calendar
9
python
def write_files(lang='en'): utcnow = datetime.utcnow() dtstamp = utcnow.strftime('%Y%m%dT%H%M%SZ') uid_format = 'UID:%(date)s-%(pid)d-%(seq)04d-%(lang)s@%(domain)s\n' uid_replace_values = {'date': dtstamp, 'pid': getpid(), 'domain': getfqdn()} event_seq = 1 tsv = open('moon-phases.tsv', 'w') tsv_new = open('new-moon.tsv', 'w') tsv_full = open('full-moon.tsv', 'w') tsv_all = open('moon-phases-all.tsv', 'w') mkd = open('moon-phases.md', 'w') mkd_new = open('new-moon.md', 'w') mkd_full = open('full-moon.md', 'w') mkd_all = open('moon-phases-all.md', 'w') ics = open('moon-phases.ics', 'w', newline='\r\n') ics_new = open('new-moon.ics', 'w', newline='\r\n') ics_full = open('full-moon.ics', 'w', newline='\r\n') tsv_header = '# {}\t# {}\t# {}\t# {}\n'.format(header[lang][0].ljust(10), header[lang][1], header[lang][2], header[lang][3]) tsv_header_short = '# {}\t# {}\n'.format(header[lang][0], header[lang][1]) tsv.write(tsv_header) tsv_all.write(tsv_header) tsv_new.write(tsv_header_short) tsv_full.write(tsv_header_short) title = header[lang][4] if (lang in titles): title = title.title() mkd_header = '# {}\n\n{} | {} | {} | {}\n-----------|-------:|---|---\n'.format(title, header[lang][0].ljust(10), header[lang][1].ljust(6), header[lang][2], header[lang][3]) title = moon_phase_names[lang][0] if (lang in titles): title = title.title() mkd_header_new = '# {}\n\n{} | {}\n-----------|------:\n'.format(title, header[lang][0].ljust(10), header[lang][1]) title = moon_phase_names[lang][4] if (lang in titles): title = title.title() mkd_header_full = '# {}\n\n{} | {}\n-----------|------:\n'.format(title, header[lang][0].ljust(10), header[lang][1]) mkd.write(mkd_header) mkd_all.write(mkd_header) mkd_new.write(mkd_header_new) mkd_full.write(mkd_header_full) calendar_header = open('../templates/calendar-header-{}.txt'.format(lang)) for line in calendar_header: if (lang in titles): ics.write(line.replace('Lunar Phase', header[lang][4].title())) ics_new.write(line.replace('Lunar Phase', 
moon_phase_names[lang][0].title())) ics_full.write(line.replace('Lunar Phase', moon_phase_names[lang][4].title())) else: ics.write(line.replace('Lunar Phase', header[lang][4])) ics_new.write(line.replace('Lunar Phase', moon_phase_names[lang][0])) ics_full.write(line.replace('Lunar Phase', moon_phase_names[lang][4])) event_header = for line in open('../templates/event-header.txt'): event_header += line.replace('DTSTAMP:', 'DTSTAMP:{}'.format(dtstamp)) event_footer = for line in open('../templates/event-footer.txt'): event_footer += line today = date.today() start = (today - timedelta(days=(31 + 1))) end = (today + timedelta(days=((2 * 366) + (2 * 31)))) for i in range((end - start).days): day = (start + timedelta(days=i)) (phase, code) = day_to_moon_phase_and_accurate_code(day) symbol = moon_phase_code_to_symbol(code) name = moon_phase_code_to_name(code, lang) tsv_all.write('{}\t{:6.3f}\t{}\t{}\n'.format(day, phase, symbol, name)) mkd_all.write('{} | {:6.3f} | {} | {}\n'.format(day, phase, symbol, name)) if ((code % 2) == 0): tsv.write('{}\t{:6.3f}\t{}\t{}\n'.format(day, phase, symbol, name)) mkd.write('{} | {:6.3f} | {} | {}\n'.format(day, phase, symbol, name)) ics.write('{}{} {}\n'.format(event_header.strip(), symbol, name)) ics.write((uid_format % dict((list(uid_replace_values.items()) + list({'lang': 'nl', 'seq': event_seq}.items()))))) event_seq += 1 ics_start = '{}'.format(day) ics_end = '{}'.format((day + timedelta(days=1))) ics.write('DTSTART;VALUE=DATE:{}\n'.format(ics_start.replace('-', ))) ics.write('DTEND;VALUE=DATE:{}\n'.format(ics_end.replace('-', ))) ics.write(event_footer) if (code == 0): tsv_new.write('{}\t{:6.3f}\n'.format(day, phase)) mkd_new.write('{} | {:6.3f}\n'.format(day, phase)) ics_new.write('{}{} {}\n'.format(event_header.strip(), symbol, name)) ics_new.write((uid_format % dict((list(uid_replace_values.items()) + list({'lang': 'nl', 'seq': event_seq}.items()))))) event_seq += 1 ics_start = '{}'.format(day) ics_end = '{}'.format((day + 
timedelta(days=1))) ics_new.write('DTSTART;VALUE=DATE:{}\n'.format(ics_start.replace('-', ))) ics_new.write('DTEND;VALUE=DATE:{}\n'.format(ics_end.replace('-', ))) ics_new.write(event_footer) if (code == 4): tsv_full.write('{}\t{:6.3f}\n'.format(day, phase)) mkd_full.write('{} | {:6.3f}\n'.format(day, phase)) ics_full.write('{}{} {}\n'.format(event_header.strip(), symbol, name)) ics_full.write((uid_format % dict((list(uid_replace_values.items()) + list({'lang': 'nl', 'seq': event_seq}.items()))))) event_seq += 1 ics_start = '{}'.format(day) ics_end = '{}'.format((day + timedelta(days=1))) ics_full.write('DTSTART;VALUE=DATE:{}\n'.format(ics_start.replace('-', ))) ics_full.write('DTEND;VALUE=DATE:{}\n'.format(ics_end.replace('-', ))) ics_full.write(event_footer) calendar_footer = open('../templates/calendar-footer.txt') for line in calendar_footer: ics.write(line) ics_new.write(line) ics_full.write(line)
def write_files(lang='en'): utcnow = datetime.utcnow() dtstamp = utcnow.strftime('%Y%m%dT%H%M%SZ') uid_format = 'UID:%(date)s-%(pid)d-%(seq)04d-%(lang)s@%(domain)s\n' uid_replace_values = {'date': dtstamp, 'pid': getpid(), 'domain': getfqdn()} event_seq = 1 tsv = open('moon-phases.tsv', 'w') tsv_new = open('new-moon.tsv', 'w') tsv_full = open('full-moon.tsv', 'w') tsv_all = open('moon-phases-all.tsv', 'w') mkd = open('moon-phases.md', 'w') mkd_new = open('new-moon.md', 'w') mkd_full = open('full-moon.md', 'w') mkd_all = open('moon-phases-all.md', 'w') ics = open('moon-phases.ics', 'w', newline='\r\n') ics_new = open('new-moon.ics', 'w', newline='\r\n') ics_full = open('full-moon.ics', 'w', newline='\r\n') tsv_header = '# {}\t# {}\t# {}\t# {}\n'.format(header[lang][0].ljust(10), header[lang][1], header[lang][2], header[lang][3]) tsv_header_short = '# {}\t# {}\n'.format(header[lang][0], header[lang][1]) tsv.write(tsv_header) tsv_all.write(tsv_header) tsv_new.write(tsv_header_short) tsv_full.write(tsv_header_short) title = header[lang][4] if (lang in titles): title = title.title() mkd_header = '# {}\n\n{} | {} | {} | {}\n-----------|-------:|---|---\n'.format(title, header[lang][0].ljust(10), header[lang][1].ljust(6), header[lang][2], header[lang][3]) title = moon_phase_names[lang][0] if (lang in titles): title = title.title() mkd_header_new = '# {}\n\n{} | {}\n-----------|------:\n'.format(title, header[lang][0].ljust(10), header[lang][1]) title = moon_phase_names[lang][4] if (lang in titles): title = title.title() mkd_header_full = '# {}\n\n{} | {}\n-----------|------:\n'.format(title, header[lang][0].ljust(10), header[lang][1]) mkd.write(mkd_header) mkd_all.write(mkd_header) mkd_new.write(mkd_header_new) mkd_full.write(mkd_header_full) calendar_header = open('../templates/calendar-header-{}.txt'.format(lang)) for line in calendar_header: if (lang in titles): ics.write(line.replace('Lunar Phase', header[lang][4].title())) ics_new.write(line.replace('Lunar Phase', 
moon_phase_names[lang][0].title())) ics_full.write(line.replace('Lunar Phase', moon_phase_names[lang][4].title())) else: ics.write(line.replace('Lunar Phase', header[lang][4])) ics_new.write(line.replace('Lunar Phase', moon_phase_names[lang][0])) ics_full.write(line.replace('Lunar Phase', moon_phase_names[lang][4])) event_header = for line in open('../templates/event-header.txt'): event_header += line.replace('DTSTAMP:', 'DTSTAMP:{}'.format(dtstamp)) event_footer = for line in open('../templates/event-footer.txt'): event_footer += line today = date.today() start = (today - timedelta(days=(31 + 1))) end = (today + timedelta(days=((2 * 366) + (2 * 31)))) for i in range((end - start).days): day = (start + timedelta(days=i)) (phase, code) = day_to_moon_phase_and_accurate_code(day) symbol = moon_phase_code_to_symbol(code) name = moon_phase_code_to_name(code, lang) tsv_all.write('{}\t{:6.3f}\t{}\t{}\n'.format(day, phase, symbol, name)) mkd_all.write('{} | {:6.3f} | {} | {}\n'.format(day, phase, symbol, name)) if ((code % 2) == 0): tsv.write('{}\t{:6.3f}\t{}\t{}\n'.format(day, phase, symbol, name)) mkd.write('{} | {:6.3f} | {} | {}\n'.format(day, phase, symbol, name)) ics.write('{}{} {}\n'.format(event_header.strip(), symbol, name)) ics.write((uid_format % dict((list(uid_replace_values.items()) + list({'lang': 'nl', 'seq': event_seq}.items()))))) event_seq += 1 ics_start = '{}'.format(day) ics_end = '{}'.format((day + timedelta(days=1))) ics.write('DTSTART;VALUE=DATE:{}\n'.format(ics_start.replace('-', ))) ics.write('DTEND;VALUE=DATE:{}\n'.format(ics_end.replace('-', ))) ics.write(event_footer) if (code == 0): tsv_new.write('{}\t{:6.3f}\n'.format(day, phase)) mkd_new.write('{} | {:6.3f}\n'.format(day, phase)) ics_new.write('{}{} {}\n'.format(event_header.strip(), symbol, name)) ics_new.write((uid_format % dict((list(uid_replace_values.items()) + list({'lang': 'nl', 'seq': event_seq}.items()))))) event_seq += 1 ics_start = '{}'.format(day) ics_end = '{}'.format((day + 
timedelta(days=1))) ics_new.write('DTSTART;VALUE=DATE:{}\n'.format(ics_start.replace('-', ))) ics_new.write('DTEND;VALUE=DATE:{}\n'.format(ics_end.replace('-', ))) ics_new.write(event_footer) if (code == 4): tsv_full.write('{}\t{:6.3f}\n'.format(day, phase)) mkd_full.write('{} | {:6.3f}\n'.format(day, phase)) ics_full.write('{}{} {}\n'.format(event_header.strip(), symbol, name)) ics_full.write((uid_format % dict((list(uid_replace_values.items()) + list({'lang': 'nl', 'seq': event_seq}.items()))))) event_seq += 1 ics_start = '{}'.format(day) ics_end = '{}'.format((day + timedelta(days=1))) ics_full.write('DTSTART;VALUE=DATE:{}\n'.format(ics_start.replace('-', ))) ics_full.write('DTEND;VALUE=DATE:{}\n'.format(ics_end.replace('-', ))) ics_full.write(event_footer) calendar_footer = open('../templates/calendar-footer.txt') for line in calendar_footer: ics.write(line) ics_new.write(line) ics_full.write(line)<|docstring|>Writes calendar files.<|endoftext|>
0f3a7e68f74c27d41c970f24b6b0d8bc34923155940d5042a213f7866408ed1d
def connect(self) -> None: 'Connect events.' self.parent_frame.pushButton_load_visualizarion.clicked.connect(self.add_subwindow) self.parent_frame.pushButton_visualizations_remove_all.clicked.connect(self.remove_all) self.parent_frame.pushButton_visualizations_reload_all.clicked.connect(self.reload_all) self.parent_frame.tableWidget_anlaysis.itemChanged.connect(self.analisys_status_update) self.parent_frame.pushButton_visualizations_stop_all.clicked.connect(self.stop_all_scripts) self.parent_frame.pushButton_visualizations_restart_all.clicked.connect(self.restart_running_scripts)
Connect events.
bci_framework/framework/environments/visualization.py
connect
UN-GCPDS/bci-framework
2
python
def connect(self) -> None: self.parent_frame.pushButton_load_visualizarion.clicked.connect(self.add_subwindow) self.parent_frame.pushButton_visualizations_remove_all.clicked.connect(self.remove_all) self.parent_frame.pushButton_visualizations_reload_all.clicked.connect(self.reload_all) self.parent_frame.tableWidget_anlaysis.itemChanged.connect(self.analisys_status_update) self.parent_frame.pushButton_visualizations_stop_all.clicked.connect(self.stop_all_scripts) self.parent_frame.pushButton_visualizations_restart_all.clicked.connect(self.restart_running_scripts)
def connect(self) -> None: self.parent_frame.pushButton_load_visualizarion.clicked.connect(self.add_subwindow) self.parent_frame.pushButton_visualizations_remove_all.clicked.connect(self.remove_all) self.parent_frame.pushButton_visualizations_reload_all.clicked.connect(self.reload_all) self.parent_frame.tableWidget_anlaysis.itemChanged.connect(self.analisys_status_update) self.parent_frame.pushButton_visualizations_stop_all.clicked.connect(self.stop_all_scripts) self.parent_frame.pushButton_visualizations_restart_all.clicked.connect(self.restart_running_scripts)<|docstring|>Connect events.<|endoftext|>
fe46b822d7dee87673fefe0456e94a0795329c6284a511562cff50e40d9ff1ec
def on_focus(self) -> None: 'Update mdiAreas.' self.parent_frame.mdiArea.tileSubWindows() self.visualizations_list = [] for i in range(self.parent_frame.listWidget_projects_visualizations.count()): item = self.parent_frame.listWidget_projects_visualizations.item(i) if item.text().startswith('_'): continue if item.text().startswith('Tutorial :'): continue self.visualizations_list.append([item.text(), item.path]) self.build_analysis()
Update mdiAreas.
bci_framework/framework/environments/visualization.py
on_focus
UN-GCPDS/bci-framework
2
python
def on_focus(self) -> None: self.parent_frame.mdiArea.tileSubWindows() self.visualizations_list = [] for i in range(self.parent_frame.listWidget_projects_visualizations.count()): item = self.parent_frame.listWidget_projects_visualizations.item(i) if item.text().startswith('_'): continue if item.text().startswith('Tutorial :'): continue self.visualizations_list.append([item.text(), item.path]) self.build_analysis()
def on_focus(self) -> None: self.parent_frame.mdiArea.tileSubWindows() self.visualizations_list = [] for i in range(self.parent_frame.listWidget_projects_visualizations.count()): item = self.parent_frame.listWidget_projects_visualizations.item(i) if item.text().startswith('_'): continue if item.text().startswith('Tutorial :'): continue self.visualizations_list.append([item.text(), item.path]) self.build_analysis()<|docstring|>Update mdiAreas.<|endoftext|>
5b46b227d50bc46f4d90a030a6921cf2f2a1993ddc8a6e2b2fb68a5c7dbe8880
def reload_all(self) -> None: 'Reload all patitions.' for sub in self.parent_frame.mdiArea.subWindowList(): sub.reload()
Reload all patitions.
bci_framework/framework/environments/visualization.py
reload_all
UN-GCPDS/bci-framework
2
python
def reload_all(self) -> None: for sub in self.parent_frame.mdiArea.subWindowList(): sub.reload()
def reload_all(self) -> None: for sub in self.parent_frame.mdiArea.subWindowList(): sub.reload()<|docstring|>Reload all patitions.<|endoftext|>
674a245a48689eba189e411c9864c5514ffc5194d2e7ef50b5f2fe4c41664fba
def remove_all(self) -> None: 'Remove all patitions.' for sub in self.parent_frame.mdiArea.subWindowList(): sub.remove() QTimer().singleShot(100, self.widgets_set_enabled)
Remove all patitions.
bci_framework/framework/environments/visualization.py
remove_all
UN-GCPDS/bci-framework
2
python
def remove_all(self) -> None: for sub in self.parent_frame.mdiArea.subWindowList(): sub.remove() QTimer().singleShot(100, self.widgets_set_enabled)
def remove_all(self) -> None: for sub in self.parent_frame.mdiArea.subWindowList(): sub.remove() QTimer().singleShot(100, self.widgets_set_enabled)<|docstring|>Remove all patitions.<|endoftext|>
53546d814d91afe16c07309a5a3c89f332f70614bbaad4af5f5ef1e2aa71b7d4
def add_subwindow(self) -> None: 'Add new patition.' sub = ExtensionWidget(self.parent_frame.mdiArea, mode='visualization', extensions_list=self.visualizations_list) self.parent_frame.mdiArea.addSubWindow(sub) sub.show() self.parent_frame.mdiArea.tileSubWindows() sub.update_menu_bar() sub.loaded = self.widgets_set_enabled sub.destroyed.connect(self.widgets_set_enabled) self.widgets_set_enabled()
Add new patition.
bci_framework/framework/environments/visualization.py
add_subwindow
UN-GCPDS/bci-framework
2
python
def add_subwindow(self) -> None: sub = ExtensionWidget(self.parent_frame.mdiArea, mode='visualization', extensions_list=self.visualizations_list) self.parent_frame.mdiArea.addSubWindow(sub) sub.show() self.parent_frame.mdiArea.tileSubWindows() sub.update_menu_bar() sub.loaded = self.widgets_set_enabled sub.destroyed.connect(self.widgets_set_enabled) self.widgets_set_enabled()
def add_subwindow(self) -> None: sub = ExtensionWidget(self.parent_frame.mdiArea, mode='visualization', extensions_list=self.visualizations_list) self.parent_frame.mdiArea.addSubWindow(sub) sub.show() self.parent_frame.mdiArea.tileSubWindows() sub.update_menu_bar() sub.loaded = self.widgets_set_enabled sub.destroyed.connect(self.widgets_set_enabled) self.widgets_set_enabled()<|docstring|>Add new patition.<|endoftext|>
875bd64baab66571a50636f9be0e652867a4453362355ce9e4266191ee931b9d
def widgets_set_enabled(self) -> None: 'Update action buttons.' subwindows = (len(self.parent_frame.mdiArea.subWindowList()) != 0) self.parent_frame.pushButton_visualizations_remove_all.setEnabled(subwindows) self.parent_frame.pushButton_visualizations_reload_all.setEnabled(False) for sub in self.parent_frame.mdiArea.subWindowList(): if getattr(sub, 'stream_subprocess', False): self.parent_frame.pushButton_visualizations_reload_all.setEnabled(True) break
Update action buttons.
bci_framework/framework/environments/visualization.py
widgets_set_enabled
UN-GCPDS/bci-framework
2
python
def widgets_set_enabled(self) -> None: subwindows = (len(self.parent_frame.mdiArea.subWindowList()) != 0) self.parent_frame.pushButton_visualizations_remove_all.setEnabled(subwindows) self.parent_frame.pushButton_visualizations_reload_all.setEnabled(False) for sub in self.parent_frame.mdiArea.subWindowList(): if getattr(sub, 'stream_subprocess', False): self.parent_frame.pushButton_visualizations_reload_all.setEnabled(True) break
def widgets_set_enabled(self) -> None: subwindows = (len(self.parent_frame.mdiArea.subWindowList()) != 0) self.parent_frame.pushButton_visualizations_remove_all.setEnabled(subwindows) self.parent_frame.pushButton_visualizations_reload_all.setEnabled(False) for sub in self.parent_frame.mdiArea.subWindowList(): if getattr(sub, 'stream_subprocess', False): self.parent_frame.pushButton_visualizations_reload_all.setEnabled(True) break<|docstring|>Update action buttons.<|endoftext|>
0000e3c5e1f7b4f770135241b1513d239c17d74d6389076cf79889d618a891a3
def has_repeat_digits(grid: list[list[str]]) -> bool: 'Returns if given 2d grid has repeating digit strings' digit_strings = {str(i) for i in range(1, 10)} for row in grid: for digit in row: if (digit == '.'): continue if (digit not in digit_strings): return True digit_strings.remove(digit) return False
Returns if given 2d grid has repeating digit strings
valid_sudoku.py
has_repeat_digits
tusharsadhwani/leetcode
6
python
def has_repeat_digits(grid: list[list[str]]) -> bool: digit_strings = {str(i) for i in range(1, 10)} for row in grid: for digit in row: if (digit == '.'): continue if (digit not in digit_strings): return True digit_strings.remove(digit) return False
def has_repeat_digits(grid: list[list[str]]) -> bool: digit_strings = {str(i) for i in range(1, 10)} for row in grid: for digit in row: if (digit == '.'): continue if (digit not in digit_strings): return True digit_strings.remove(digit) return False<|docstring|>Returns if given 2d grid has repeating digit strings<|endoftext|>
6576d2b91e1820a7d04d28da94281ca01d0e4d32716362130fc705ee94570a85
def check_attribute_conflict(label_batch, attr, attrs): ' Based on https://github.com/LynnHo/AttGAN-Tensorflow' def _set(label, value, attr): if (attr in attrs): label[attrs.index(attr)] = value attr_id = attrs.index(attr) for label in label_batch: if ((attr in ['Bald', 'Receding_Hairline']) and (attrs[attr_id] != 0)): _set(label, 0, 'Bangs') elif ((attr == 'Bangs') and (attrs[attr_id] != 0)): _set(label, 0, 'Bald') _set(label, 0, 'Receding_Hairline') elif ((attr in ['Black_Hair', 'Blond_Hair', 'Brown_Hair', 'Gray_Hair']) and (attrs[attr_id] != 0)): for a in ['Black_Hair', 'Blond_Hair', 'Brown_Hair', 'Gray_Hair']: if (a != attr): _set(label, 0, a) elif ((attr in ['Straight_Hair', 'Wavy_Hair']) and (attrs[attr_id] != 0)): for a in ['Straight_Hair', 'Wavy_Hair']: if (a != attr): _set(label, 0, a) return label_batch
Based on https://github.com/LynnHo/AttGAN-Tensorflow
PaddleCV/PaddleGAN/util/utility.py
check_attribute_conflict
liuzengzhen1/models
3
python
def check_attribute_conflict(label_batch, attr, attrs): ' ' def _set(label, value, attr): if (attr in attrs): label[attrs.index(attr)] = value attr_id = attrs.index(attr) for label in label_batch: if ((attr in ['Bald', 'Receding_Hairline']) and (attrs[attr_id] != 0)): _set(label, 0, 'Bangs') elif ((attr == 'Bangs') and (attrs[attr_id] != 0)): _set(label, 0, 'Bald') _set(label, 0, 'Receding_Hairline') elif ((attr in ['Black_Hair', 'Blond_Hair', 'Brown_Hair', 'Gray_Hair']) and (attrs[attr_id] != 0)): for a in ['Black_Hair', 'Blond_Hair', 'Brown_Hair', 'Gray_Hair']: if (a != attr): _set(label, 0, a) elif ((attr in ['Straight_Hair', 'Wavy_Hair']) and (attrs[attr_id] != 0)): for a in ['Straight_Hair', 'Wavy_Hair']: if (a != attr): _set(label, 0, a) return label_batch
def check_attribute_conflict(label_batch, attr, attrs): ' ' def _set(label, value, attr): if (attr in attrs): label[attrs.index(attr)] = value attr_id = attrs.index(attr) for label in label_batch: if ((attr in ['Bald', 'Receding_Hairline']) and (attrs[attr_id] != 0)): _set(label, 0, 'Bangs') elif ((attr == 'Bangs') and (attrs[attr_id] != 0)): _set(label, 0, 'Bald') _set(label, 0, 'Receding_Hairline') elif ((attr in ['Black_Hair', 'Blond_Hair', 'Brown_Hair', 'Gray_Hair']) and (attrs[attr_id] != 0)): for a in ['Black_Hair', 'Blond_Hair', 'Brown_Hair', 'Gray_Hair']: if (a != attr): _set(label, 0, a) elif ((attr in ['Straight_Hair', 'Wavy_Hair']) and (attrs[attr_id] != 0)): for a in ['Straight_Hair', 'Wavy_Hair']: if (a != attr): _set(label, 0, a) return label_batch<|docstring|>Based on https://github.com/LynnHo/AttGAN-Tensorflow<|endoftext|>
9d79e42de93e120b8f0912b2c568eba9636c451a3ec5f0743b74042672cb5ad0
def check_gpu(use_gpu): '\n Log error and exit when set use_gpu=true in paddlepaddle\n cpu version.\n ' err = 'Config use_gpu cannot be set as true while you are using paddlepaddle cpu version ! \nPlease try: \n\t1. Install paddlepaddle-gpu to run model on GPU \n\t2. Set use_gpu as false in config file to run model on CPU' try: if (use_gpu and (not fluid.is_compiled_with_cuda())): print(err) sys.exit(1) except Exception as e: pass
Log error and exit when set use_gpu=true in paddlepaddle cpu version.
PaddleCV/PaddleGAN/util/utility.py
check_gpu
liuzengzhen1/models
3
python
def check_gpu(use_gpu): '\n Log error and exit when set use_gpu=true in paddlepaddle\n cpu version.\n ' err = 'Config use_gpu cannot be set as true while you are using paddlepaddle cpu version ! \nPlease try: \n\t1. Install paddlepaddle-gpu to run model on GPU \n\t2. Set use_gpu as false in config file to run model on CPU' try: if (use_gpu and (not fluid.is_compiled_with_cuda())): print(err) sys.exit(1) except Exception as e: pass
def check_gpu(use_gpu): '\n Log error and exit when set use_gpu=true in paddlepaddle\n cpu version.\n ' err = 'Config use_gpu cannot be set as true while you are using paddlepaddle cpu version ! \nPlease try: \n\t1. Install paddlepaddle-gpu to run model on GPU \n\t2. Set use_gpu as false in config file to run model on CPU' try: if (use_gpu and (not fluid.is_compiled_with_cuda())): print(err) sys.exit(1) except Exception as e: pass<|docstring|>Log error and exit when set use_gpu=true in paddlepaddle cpu version.<|endoftext|>
6198b17435b578946f25a72071091b12c1b5278b1a6761ab736c8211378acb18
def phonemes_to_mels(self, phoneme_ids: np.ndarray, settings: typing.Optional[SettingsType]=None) -> ARRAY_OR_TENSOR: 'Convert phoneme ids to mel spectrograms' pass
Convert phoneme ids to mel spectrograms
larynx/constants.py
phonemes_to_mels
mbarnig/larynx
540
python
def phonemes_to_mels(self, phoneme_ids: np.ndarray, settings: typing.Optional[SettingsType]=None) -> ARRAY_OR_TENSOR: pass
def phonemes_to_mels(self, phoneme_ids: np.ndarray, settings: typing.Optional[SettingsType]=None) -> ARRAY_OR_TENSOR: pass<|docstring|>Convert phoneme ids to mel spectrograms<|endoftext|>
e0f0b21f47be10e0bd0976a2883247fbfb0b6e596edb487be3b7b32a0446529a
def mels_to_audio(self, mels: ARRAY_OR_TENSOR, settings: typing.Optional[SettingsType]=None) -> np.ndarray: 'Convert mel spectrograms to WAV audio' pass
Convert mel spectrograms to WAV audio
larynx/constants.py
mels_to_audio
mbarnig/larynx
540
python
def mels_to_audio(self, mels: ARRAY_OR_TENSOR, settings: typing.Optional[SettingsType]=None) -> np.ndarray: pass
def mels_to_audio(self, mels: ARRAY_OR_TENSOR, settings: typing.Optional[SettingsType]=None) -> np.ndarray: pass<|docstring|>Convert mel spectrograms to WAV audio<|endoftext|>
2817f1ac857ad78be1d45629e5326880d71542d465bd2d1d1f4a561355f78d0f
def __init__(self, *args, **kwargs): '\n Create the request client instance.\n :param kwargs: The option of request connection.\n api_key: The public key applied from Huobi.\n secret_key: The private key applied from Huobi.\n url: The URL name like "https://api.huobi.pro".\n init_log: to init logger\n ' self.__kwargs = kwargs self.rest_api_sync_client = RestApiSyncClient(*args, **kwargs) self.web_socket_req_client = WebSocketReqClient(*args, **kwargs) self.sub_socket_req_client = SubscribeClient(*args, **kwargs)
Create the request client instance. :param kwargs: The option of request connection. api_key: The public key applied from Huobi. secret_key: The private key applied from Huobi. url: The URL name like "https://api.huobi.pro". init_log: to init logger
notecoin/huobi/client/generic.py
__init__
notechats/notecoin
0
python
def __init__(self, *args, **kwargs): '\n Create the request client instance.\n :param kwargs: The option of request connection.\n api_key: The public key applied from Huobi.\n secret_key: The private key applied from Huobi.\n url: The URL name like "https://api.huobi.pro".\n init_log: to init logger\n ' self.__kwargs = kwargs self.rest_api_sync_client = RestApiSyncClient(*args, **kwargs) self.web_socket_req_client = WebSocketReqClient(*args, **kwargs) self.sub_socket_req_client = SubscribeClient(*args, **kwargs)
def __init__(self, *args, **kwargs): '\n Create the request client instance.\n :param kwargs: The option of request connection.\n api_key: The public key applied from Huobi.\n secret_key: The private key applied from Huobi.\n url: The URL name like "https://api.huobi.pro".\n init_log: to init logger\n ' self.__kwargs = kwargs self.rest_api_sync_client = RestApiSyncClient(*args, **kwargs) self.web_socket_req_client = WebSocketReqClient(*args, **kwargs) self.sub_socket_req_client = SubscribeClient(*args, **kwargs)<|docstring|>Create the request client instance. :param kwargs: The option of request connection. api_key: The public key applied from Huobi. secret_key: The private key applied from Huobi. url: The URL name like "https://api.huobi.pro". init_log: to init logger<|endoftext|>
ba0733611bcf045fc97229ebf63dae9bdb24e3d17a2757639e6487c20488a607
def get_exchange_timestamp(self) -> int: '\n Get the timestamp from Huobi server. The timestamp is the Unix timestamp in millisecond.\n The count shows how many milliseconds passed from Jan 1st 1970, 00:00:00.000 at UTC.\n e.g. 1546300800000 is Thu, 1st Jan 2019 00:00:00.000 UTC.\n\n :return: The timestamp in UTC\n ' channel = '/v1/common/timestamp' params = {} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)
Get the timestamp from Huobi server. The timestamp is the Unix timestamp in millisecond. The count shows how many milliseconds passed from Jan 1st 1970, 00:00:00.000 at UTC. e.g. 1546300800000 is Thu, 1st Jan 2019 00:00:00.000 UTC. :return: The timestamp in UTC
notecoin/huobi/client/generic.py
get_exchange_timestamp
notechats/notecoin
0
python
def get_exchange_timestamp(self) -> int: '\n Get the timestamp from Huobi server. The timestamp is the Unix timestamp in millisecond.\n The count shows how many milliseconds passed from Jan 1st 1970, 00:00:00.000 at UTC.\n e.g. 1546300800000 is Thu, 1st Jan 2019 00:00:00.000 UTC.\n\n :return: The timestamp in UTC\n ' channel = '/v1/common/timestamp' params = {} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)
def get_exchange_timestamp(self) -> int: '\n Get the timestamp from Huobi server. The timestamp is the Unix timestamp in millisecond.\n The count shows how many milliseconds passed from Jan 1st 1970, 00:00:00.000 at UTC.\n e.g. 1546300800000 is Thu, 1st Jan 2019 00:00:00.000 UTC.\n\n :return: The timestamp in UTC\n ' channel = '/v1/common/timestamp' params = {} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)<|docstring|>Get the timestamp from Huobi server. The timestamp is the Unix timestamp in millisecond. The count shows how many milliseconds passed from Jan 1st 1970, 00:00:00.000 at UTC. e.g. 1546300800000 is Thu, 1st Jan 2019 00:00:00.000 UTC. :return: The timestamp in UTC<|endoftext|>
76c146e5aafa06b6b23feba32b8d4ed0a646b19e3a0bff1fd5cb816e3fa5ee34
def get_exchange_currencies(self): '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument, including base currency, quote precision, etc.\n\n :return: The information of trading currencies.\n ' channel = '/v1/common/currencys' params = {} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)
Get all the trading assets and currencies supported in huobi. The information of trading instrument, including base currency, quote precision, etc. :return: The information of trading currencies.
notecoin/huobi/client/generic.py
get_exchange_currencies
notechats/notecoin
0
python
def get_exchange_currencies(self): '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument, including base currency, quote precision, etc.\n\n :return: The information of trading currencies.\n ' channel = '/v1/common/currencys' params = {} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)
def get_exchange_currencies(self): '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument, including base currency, quote precision, etc.\n\n :return: The information of trading currencies.\n ' channel = '/v1/common/currencys' params = {} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)<|docstring|>Get all the trading assets and currencies supported in huobi. The information of trading instrument, including base currency, quote precision, etc. :return: The information of trading currencies.<|endoftext|>
e72f21633f13f027ee2557008328b6057b1512b232dc31038a395cf674caea10
def get_exchange_symbols(self): '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument etc.\n\n :return: The information of trading instrument.\n ' channel = '/v1/common/symbols' params = {} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)
Get all the trading assets and currencies supported in huobi. The information of trading instrument etc. :return: The information of trading instrument.
notecoin/huobi/client/generic.py
get_exchange_symbols
notechats/notecoin
0
python
def get_exchange_symbols(self): '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument etc.\n\n :return: The information of trading instrument.\n ' channel = '/v1/common/symbols' params = {} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)
def get_exchange_symbols(self): '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument etc.\n\n :return: The information of trading instrument.\n ' channel = '/v1/common/symbols' params = {} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)<|docstring|>Get all the trading assets and currencies supported in huobi. The information of trading instrument etc. :return: The information of trading instrument.<|endoftext|>
de79906dda3e5b1f05f2c572d31dce8da38c8930eec5ceaba26df1aba009e0d2
def get_exchange_info(self): '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument, including base currency, quote precision, etc.\n\n :return: The information of trading instrument and currencies.\n ' ret = {'symbol_list': self.get_exchange_symbols(), 'currencies': self.get_exchange_currencies()} return ret
Get all the trading assets and currencies supported in huobi. The information of trading instrument, including base currency, quote precision, etc. :return: The information of trading instrument and currencies.
notecoin/huobi/client/generic.py
get_exchange_info
notechats/notecoin
0
python
def get_exchange_info(self): '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument, including base currency, quote precision, etc.\n\n :return: The information of trading instrument and currencies.\n ' ret = {'symbol_list': self.get_exchange_symbols(), 'currencies': self.get_exchange_currencies()} return ret
def get_exchange_info(self): '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument, including base currency, quote precision, etc.\n\n :return: The information of trading instrument and currencies.\n ' ret = {'symbol_list': self.get_exchange_symbols(), 'currencies': self.get_exchange_currencies()} return ret<|docstring|>Get all the trading assets and currencies supported in huobi. The information of trading instrument, including base currency, quote precision, etc. :return: The information of trading instrument and currencies.<|endoftext|>
8c7342aa32732b79d3ec99cca4809114c4c20aaef36fbbe4f263246b11e50f62
def get_reference_currencies(self, currency: 'str'=None, is_authorized_user: 'bool'=None) -> list: '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument, including base currency, quote precision, etc.\n\n :param currency: btc, ltc, bch, eth, etc ...(available currencies in Huobi Global)\n :param is_authorized_user: is Authorized user? True or False\n :return: The information of trading instrument and currencies.\n ' channel = '/v2/reference/currencies' params = {'currency': currency, 'authorizedUser': is_authorized_user} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)
Get all the trading assets and currencies supported in huobi. The information of trading instrument, including base currency, quote precision, etc. :param currency: btc, ltc, bch, eth, etc ...(available currencies in Huobi Global) :param is_authorized_user: is Authorized user? True or False :return: The information of trading instrument and currencies.
notecoin/huobi/client/generic.py
get_reference_currencies
notechats/notecoin
0
python
def get_reference_currencies(self, currency: 'str'=None, is_authorized_user: 'bool'=None) -> list: '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument, including base currency, quote precision, etc.\n\n :param currency: btc, ltc, bch, eth, etc ...(available currencies in Huobi Global)\n :param is_authorized_user: is Authorized user? True or False\n :return: The information of trading instrument and currencies.\n ' channel = '/v2/reference/currencies' params = {'currency': currency, 'authorizedUser': is_authorized_user} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)
def get_reference_currencies(self, currency: 'str'=None, is_authorized_user: 'bool'=None) -> list: '\n Get all the trading assets and currencies supported in huobi.\n The information of trading instrument, including base currency, quote precision, etc.\n\n :param currency: btc, ltc, bch, eth, etc ...(available currencies in Huobi Global)\n :param is_authorized_user: is Authorized user? True or False\n :return: The information of trading instrument and currencies.\n ' channel = '/v2/reference/currencies' params = {'currency': currency, 'authorizedUser': is_authorized_user} return self.rest_api_sync_client.request_process(HttpMethod.GET, channel, params)<|docstring|>Get all the trading assets and currencies supported in huobi. The information of trading instrument, including base currency, quote precision, etc. :param currency: btc, ltc, bch, eth, etc ...(available currencies in Huobi Global) :param is_authorized_user: is Authorized user? True or False :return: The information of trading instrument and currencies.<|endoftext|>
9e31e558f40a620afcfa335ec16b566bf1a13a5cbcd9c641224b7b5898af664f
def get_system_status(self) -> str: '\n get system status\n\n :return: system status.\n ' channel = '/api/v2/summary.json' temp = self.rest_api_sync_client.__server_url self.rest_api_sync_client.__server_url = 'https://status.huobigroup.com' res = self.rest_api_sync_client.request_process(HttpMethod.GET, channel, {}) self.rest_api_sync_client.__server_url = temp return res
get system status :return: system status.
notecoin/huobi/client/generic.py
get_system_status
notechats/notecoin
0
python
def get_system_status(self) -> str: '\n get system status\n\n :return: system status.\n ' channel = '/api/v2/summary.json' temp = self.rest_api_sync_client.__server_url self.rest_api_sync_client.__server_url = 'https://status.huobigroup.com' res = self.rest_api_sync_client.request_process(HttpMethod.GET, channel, {}) self.rest_api_sync_client.__server_url = temp return res
def get_system_status(self) -> str: '\n get system status\n\n :return: system status.\n ' channel = '/api/v2/summary.json' temp = self.rest_api_sync_client.__server_url self.rest_api_sync_client.__server_url = 'https://status.huobigroup.com' res = self.rest_api_sync_client.request_process(HttpMethod.GET, channel, {}) self.rest_api_sync_client.__server_url = temp return res<|docstring|>get system status :return: system status.<|endoftext|>
414a2249071fac971bcb1cf3d26bad229dff3cf6496c463703d483876a1b6760
def is_virginica_test(fi, t, reverse, example): 'Apply threshold model to a new example' test = (example[fi] > t) if reverse: test = (not test) return test
Apply threshold model to a new example
ch02/chapter.py
is_virginica_test
Jonkimi/BuildingMachineLearningSystemsWithPython
1,490
python
def is_virginica_test(fi, t, reverse, example): test = (example[fi] > t) if reverse: test = (not test) return test
def is_virginica_test(fi, t, reverse, example): test = (example[fi] > t) if reverse: test = (not test) return test<|docstring|>Apply threshold model to a new example<|endoftext|>
e6c2655a890905eb69fd72841251abe42fe8a236cd00b4764fc906d8cfa72feb
def __init__(self, jwk_endpoint: str, api_logout_url: str, **kwargs): 'Construct a OAuth 2 client session.' self.jwk_endpoint = jwk_endpoint self.api_logout_url = api_logout_url super(OAuth2Session, self).__init__(**kwargs)
Construct a OAuth 2 client session.
src/auth_api/oidc/session.py
__init__
Energinet-DataHub/po-auth
1
python
def __init__(self, jwk_endpoint: str, api_logout_url: str, **kwargs): self.jwk_endpoint = jwk_endpoint self.api_logout_url = api_logout_url super(OAuth2Session, self).__init__(**kwargs)
def __init__(self, jwk_endpoint: str, api_logout_url: str, **kwargs): self.jwk_endpoint = jwk_endpoint self.api_logout_url = api_logout_url super(OAuth2Session, self).__init__(**kwargs)<|docstring|>Construct a OAuth 2 client session.<|endoftext|>
2e4d5c14a123ddecedd920545e38e75f62b13675c667189cd30c17dbe3533b33
def get_jwk(self) -> str: 'TODO.' jwks_response = requests.get(url=self.jwk_endpoint, verify=True) return jwks_response.content.decode()
TODO.
src/auth_api/oidc/session.py
get_jwk
Energinet-DataHub/po-auth
1
python
def get_jwk(self) -> str: jwks_response = requests.get(url=self.jwk_endpoint, verify=True) return jwks_response.content.decode()
def get_jwk(self) -> str: jwks_response = requests.get(url=self.jwk_endpoint, verify=True) return jwks_response.content.decode()<|docstring|>TODO.<|endoftext|>
f811eec348ce6a079348c3cd98d825e0f9d6a41cb9d5a6bcc63ffd0a99ad9365
def logout(self, id_token: str): '\n Logout the user from used Identity Provider.\n\n Provided an ID-token, this method invokes the back-channel logout\n endpoint on the Identity Provider, which logs the user out on\n their side, forcing the user to login again next time he is\n redirected to the authorization URL.\n ' response = requests.post(url=self.api_logout_url, json={'id_token': id_token}) if (response.status_code != 200): raise RuntimeError(f'Logout returned status {response.status_code}')
Logout the user from used Identity Provider. Provided an ID-token, this method invokes the back-channel logout endpoint on the Identity Provider, which logs the user out on their side, forcing the user to login again next time he is redirected to the authorization URL.
src/auth_api/oidc/session.py
logout
Energinet-DataHub/po-auth
1
python
def logout(self, id_token: str): '\n Logout the user from used Identity Provider.\n\n Provided an ID-token, this method invokes the back-channel logout\n endpoint on the Identity Provider, which logs the user out on\n their side, forcing the user to login again next time he is\n redirected to the authorization URL.\n ' response = requests.post(url=self.api_logout_url, json={'id_token': id_token}) if (response.status_code != 200): raise RuntimeError(f'Logout returned status {response.status_code}')
def logout(self, id_token: str): '\n Logout the user from used Identity Provider.\n\n Provided an ID-token, this method invokes the back-channel logout\n endpoint on the Identity Provider, which logs the user out on\n their side, forcing the user to login again next time he is\n redirected to the authorization URL.\n ' response = requests.post(url=self.api_logout_url, json={'id_token': id_token}) if (response.status_code != 200): raise RuntimeError(f'Logout returned status {response.status_code}')<|docstring|>Logout the user from used Identity Provider. Provided an ID-token, this method invokes the back-channel logout endpoint on the Identity Provider, which logs the user out on their side, forcing the user to login again next time he is redirected to the authorization URL.<|endoftext|>
6b59f62375d39308fa1dca942a5dc2ade98d6656118f7e18d740bac51fe42dc2
def __init__(self, keys: KeysCollection, output_postfixes: Sequence[str], to_onehot: Union[(Sequence[bool], bool)]=False, num_classes: Optional[Union[(Sequence[int], int)]]=None) -> None: '\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n output_postfixes: the postfixes to construct keys to store split data.\n for example: if the key of input data is `pred` and split 2 classes, the output\n data keys will be: pred_(output_postfixes[0]), pred_(output_postfixes[1])\n to_onehot: whether to convert the data to One-Hot format, default is False.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n num_classes: the class number used to convert to One-Hot format\n if `to_onehot` is True. it also can be a sequence of int, each element corresponds\n to a key in ``keys``.\n\n ' super().__init__(keys) self.output_postfixes = output_postfixes self.to_onehot = ensure_tuple_rep(to_onehot, len(self.keys)) self.num_classes = ensure_tuple_rep(num_classes, len(self.keys)) self.splitter = SplitChannel()
Args: keys: keys of the corresponding items to be transformed. See also: :py:class:`monai.transforms.compose.MapTransform` output_postfixes: the postfixes to construct keys to store split data. for example: if the key of input data is `pred` and split 2 classes, the output data keys will be: pred_(output_postfixes[0]), pred_(output_postfixes[1]) to_onehot: whether to convert the data to One-Hot format, default is False. it also can be a sequence of bool, each element corresponds to a key in ``keys``. num_classes: the class number used to convert to One-Hot format if `to_onehot` is True. it also can be a sequence of int, each element corresponds to a key in ``keys``.
monai/transforms/post/dictionary.py
__init__
dzenanz/MONAI
3
python
def __init__(self, keys: KeysCollection, output_postfixes: Sequence[str], to_onehot: Union[(Sequence[bool], bool)]=False, num_classes: Optional[Union[(Sequence[int], int)]]=None) -> None: '\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n output_postfixes: the postfixes to construct keys to store split data.\n for example: if the key of input data is `pred` and split 2 classes, the output\n data keys will be: pred_(output_postfixes[0]), pred_(output_postfixes[1])\n to_onehot: whether to convert the data to One-Hot format, default is False.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n num_classes: the class number used to convert to One-Hot format\n if `to_onehot` is True. it also can be a sequence of int, each element corresponds\n to a key in ``keys``.\n\n ' super().__init__(keys) self.output_postfixes = output_postfixes self.to_onehot = ensure_tuple_rep(to_onehot, len(self.keys)) self.num_classes = ensure_tuple_rep(num_classes, len(self.keys)) self.splitter = SplitChannel()
def __init__(self, keys: KeysCollection, output_postfixes: Sequence[str], to_onehot: Union[(Sequence[bool], bool)]=False, num_classes: Optional[Union[(Sequence[int], int)]]=None) -> None: '\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n output_postfixes: the postfixes to construct keys to store split data.\n for example: if the key of input data is `pred` and split 2 classes, the output\n data keys will be: pred_(output_postfixes[0]), pred_(output_postfixes[1])\n to_onehot: whether to convert the data to One-Hot format, default is False.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n num_classes: the class number used to convert to One-Hot format\n if `to_onehot` is True. it also can be a sequence of int, each element corresponds\n to a key in ``keys``.\n\n ' super().__init__(keys) self.output_postfixes = output_postfixes self.to_onehot = ensure_tuple_rep(to_onehot, len(self.keys)) self.num_classes = ensure_tuple_rep(num_classes, len(self.keys)) self.splitter = SplitChannel()<|docstring|>Args: keys: keys of the corresponding items to be transformed. See also: :py:class:`monai.transforms.compose.MapTransform` output_postfixes: the postfixes to construct keys to store split data. for example: if the key of input data is `pred` and split 2 classes, the output data keys will be: pred_(output_postfixes[0]), pred_(output_postfixes[1]) to_onehot: whether to convert the data to One-Hot format, default is False. it also can be a sequence of bool, each element corresponds to a key in ``keys``. num_classes: the class number used to convert to One-Hot format if `to_onehot` is True. it also can be a sequence of int, each element corresponds to a key in ``keys``.<|endoftext|>
98480aaea2c74b9f5dcfbcbab1f0a46c21e2b01542f59fc1837009ed3a67faae
def __init__(self, keys: KeysCollection, sigmoid: Union[(Sequence[bool], bool)]=False, softmax: Union[(Sequence[bool], bool)]=False, other: Optional[Union[(Sequence[Callable], Callable)]]=None) -> None: '\n Args:\n keys: keys of the corresponding items to model output and label.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n sigmoid: whether to execute sigmoid function on model output before transform.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n softmax: whether to execute softmax function on model output before transform.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n other: callable function to execute other activation layers,\n for example: `other = lambda x: torch.tanh(x)`. it also can be a sequence of Callable, each\n element corresponds to a key in ``keys``.\n\n ' super().__init__(keys) self.sigmoid = ensure_tuple_rep(sigmoid, len(self.keys)) self.softmax = ensure_tuple_rep(softmax, len(self.keys)) self.other = ensure_tuple_rep(other, len(self.keys)) self.converter = Activations()
Args: keys: keys of the corresponding items to model output and label. See also: :py:class:`monai.transforms.compose.MapTransform` sigmoid: whether to execute sigmoid function on model output before transform. it also can be a sequence of bool, each element corresponds to a key in ``keys``. softmax: whether to execute softmax function on model output before transform. it also can be a sequence of bool, each element corresponds to a key in ``keys``. other: callable function to execute other activation layers, for example: `other = lambda x: torch.tanh(x)`. it also can be a sequence of Callable, each element corresponds to a key in ``keys``.
monai/transforms/post/dictionary.py
__init__
dzenanz/MONAI
3
python
def __init__(self, keys: KeysCollection, sigmoid: Union[(Sequence[bool], bool)]=False, softmax: Union[(Sequence[bool], bool)]=False, other: Optional[Union[(Sequence[Callable], Callable)]]=None) -> None: '\n Args:\n keys: keys of the corresponding items to model output and label.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n sigmoid: whether to execute sigmoid function on model output before transform.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n softmax: whether to execute softmax function on model output before transform.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n other: callable function to execute other activation layers,\n for example: `other = lambda x: torch.tanh(x)`. it also can be a sequence of Callable, each\n element corresponds to a key in ``keys``.\n\n ' super().__init__(keys) self.sigmoid = ensure_tuple_rep(sigmoid, len(self.keys)) self.softmax = ensure_tuple_rep(softmax, len(self.keys)) self.other = ensure_tuple_rep(other, len(self.keys)) self.converter = Activations()
def __init__(self, keys: KeysCollection, sigmoid: Union[(Sequence[bool], bool)]=False, softmax: Union[(Sequence[bool], bool)]=False, other: Optional[Union[(Sequence[Callable], Callable)]]=None) -> None: '\n Args:\n keys: keys of the corresponding items to model output and label.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n sigmoid: whether to execute sigmoid function on model output before transform.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n softmax: whether to execute softmax function on model output before transform.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n other: callable function to execute other activation layers,\n for example: `other = lambda x: torch.tanh(x)`. it also can be a sequence of Callable, each\n element corresponds to a key in ``keys``.\n\n ' super().__init__(keys) self.sigmoid = ensure_tuple_rep(sigmoid, len(self.keys)) self.softmax = ensure_tuple_rep(softmax, len(self.keys)) self.other = ensure_tuple_rep(other, len(self.keys)) self.converter = Activations()<|docstring|>Args: keys: keys of the corresponding items to model output and label. See also: :py:class:`monai.transforms.compose.MapTransform` sigmoid: whether to execute sigmoid function on model output before transform. it also can be a sequence of bool, each element corresponds to a key in ``keys``. softmax: whether to execute softmax function on model output before transform. it also can be a sequence of bool, each element corresponds to a key in ``keys``. other: callable function to execute other activation layers, for example: `other = lambda x: torch.tanh(x)`. it also can be a sequence of Callable, each element corresponds to a key in ``keys``.<|endoftext|>
379fe53e1c9c2c5f05034f008d7e1b9ea61655a21e32f901bacc135b9c301ee1
def __init__(self, keys: KeysCollection, argmax: Union[(Sequence[bool], bool)]=False, to_onehot: Union[(Sequence[bool], bool)]=False, n_classes: Optional[Union[(Sequence[int], int)]]=None, threshold_values: Union[(Sequence[bool], bool)]=False, logit_thresh: Union[(Sequence[float], float)]=0.5) -> None: '\n Args:\n keys: keys of the corresponding items to model output and label.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n argmax: whether to execute argmax function on input data before transform.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n to_onehot: whether to convert input data into the one-hot format. Defaults to False.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n n_classes: the number of classes to convert to One-Hot format. it also can be a\n sequence of int, each element corresponds to a key in ``keys``.\n threshold_values: whether threshold the float value to int number 0 or 1, default is False.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n logit_thresh: the threshold value for thresholding operation, default is 0.5.\n it also can be a sequence of float, each element corresponds to a key in ``keys``.\n\n ' super().__init__(keys) self.argmax = ensure_tuple_rep(argmax, len(self.keys)) self.to_onehot = ensure_tuple_rep(to_onehot, len(self.keys)) self.n_classes = ensure_tuple_rep(n_classes, len(self.keys)) self.threshold_values = ensure_tuple_rep(threshold_values, len(self.keys)) self.logit_thresh = ensure_tuple_rep(logit_thresh, len(self.keys)) self.converter = AsDiscrete()
Args: keys: keys of the corresponding items to model output and label. See also: :py:class:`monai.transforms.compose.MapTransform` argmax: whether to execute argmax function on input data before transform. it also can be a sequence of bool, each element corresponds to a key in ``keys``. to_onehot: whether to convert input data into the one-hot format. Defaults to False. it also can be a sequence of bool, each element corresponds to a key in ``keys``. n_classes: the number of classes to convert to One-Hot format. it also can be a sequence of int, each element corresponds to a key in ``keys``. threshold_values: whether threshold the float value to int number 0 or 1, default is False. it also can be a sequence of bool, each element corresponds to a key in ``keys``. logit_thresh: the threshold value for thresholding operation, default is 0.5. it also can be a sequence of float, each element corresponds to a key in ``keys``.
monai/transforms/post/dictionary.py
__init__
dzenanz/MONAI
3
python
def __init__(self, keys: KeysCollection, argmax: Union[(Sequence[bool], bool)]=False, to_onehot: Union[(Sequence[bool], bool)]=False, n_classes: Optional[Union[(Sequence[int], int)]]=None, threshold_values: Union[(Sequence[bool], bool)]=False, logit_thresh: Union[(Sequence[float], float)]=0.5) -> None: '\n Args:\n keys: keys of the corresponding items to model output and label.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n argmax: whether to execute argmax function on input data before transform.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n to_onehot: whether to convert input data into the one-hot format. Defaults to False.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n n_classes: the number of classes to convert to One-Hot format. it also can be a\n sequence of int, each element corresponds to a key in ``keys``.\n threshold_values: whether threshold the float value to int number 0 or 1, default is False.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n logit_thresh: the threshold value for thresholding operation, default is 0.5.\n it also can be a sequence of float, each element corresponds to a key in ``keys``.\n\n ' super().__init__(keys) self.argmax = ensure_tuple_rep(argmax, len(self.keys)) self.to_onehot = ensure_tuple_rep(to_onehot, len(self.keys)) self.n_classes = ensure_tuple_rep(n_classes, len(self.keys)) self.threshold_values = ensure_tuple_rep(threshold_values, len(self.keys)) self.logit_thresh = ensure_tuple_rep(logit_thresh, len(self.keys)) self.converter = AsDiscrete()
def __init__(self, keys: KeysCollection, argmax: Union[(Sequence[bool], bool)]=False, to_onehot: Union[(Sequence[bool], bool)]=False, n_classes: Optional[Union[(Sequence[int], int)]]=None, threshold_values: Union[(Sequence[bool], bool)]=False, logit_thresh: Union[(Sequence[float], float)]=0.5) -> None: '\n Args:\n keys: keys of the corresponding items to model output and label.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n argmax: whether to execute argmax function on input data before transform.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n to_onehot: whether to convert input data into the one-hot format. Defaults to False.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n n_classes: the number of classes to convert to One-Hot format. it also can be a\n sequence of int, each element corresponds to a key in ``keys``.\n threshold_values: whether threshold the float value to int number 0 or 1, default is False.\n it also can be a sequence of bool, each element corresponds to a key in ``keys``.\n logit_thresh: the threshold value for thresholding operation, default is 0.5.\n it also can be a sequence of float, each element corresponds to a key in ``keys``.\n\n ' super().__init__(keys) self.argmax = ensure_tuple_rep(argmax, len(self.keys)) self.to_onehot = ensure_tuple_rep(to_onehot, len(self.keys)) self.n_classes = ensure_tuple_rep(n_classes, len(self.keys)) self.threshold_values = ensure_tuple_rep(threshold_values, len(self.keys)) self.logit_thresh = ensure_tuple_rep(logit_thresh, len(self.keys)) self.converter = AsDiscrete()<|docstring|>Args: keys: keys of the corresponding items to model output and label. See also: :py:class:`monai.transforms.compose.MapTransform` argmax: whether to execute argmax function on input data before transform. it also can be a sequence of bool, each element corresponds to a key in ``keys``. 
to_onehot: whether to convert input data into the one-hot format. Defaults to False. it also can be a sequence of bool, each element corresponds to a key in ``keys``. n_classes: the number of classes to convert to One-Hot format. it also can be a sequence of int, each element corresponds to a key in ``keys``. threshold_values: whether threshold the float value to int number 0 or 1, default is False. it also can be a sequence of bool, each element corresponds to a key in ``keys``. logit_thresh: the threshold value for thresholding operation, default is 0.5. it also can be a sequence of float, each element corresponds to a key in ``keys``.<|endoftext|>
0a3282145c11e467fee9aaa1c471aecf523064aa06ae658cc4e92c73f248684f
def __init__(self, keys: KeysCollection, applied_labels: Union[(Sequence[int], int)], independent: bool=True, connectivity: Optional[int]=None) -> None: '\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n applied_labels: Labels for applying the connected component on.\n If only one channel. The pixel whose value is not in this list will remain unchanged.\n If the data is in one-hot format, this is the channel indices to apply transform.\n independent: consider several labels as a whole or independent, default is `True`.\n Example use case would be segment label 1 is liver and label 2 is liver tumor, in that case\n you want this "independent" to be specified as False.\n connectivity: Maximum number of orthogonal hops to consider a pixel/voxel as a neighbor.\n Accepted values are ranging from 1 to input.ndim. If ``None``, a full\n connectivity of ``input.ndim`` is used.\n\n ' super().__init__(keys) self.converter = KeepLargestConnectedComponent(applied_labels, independent, connectivity)
Args: keys: keys of the corresponding items to be transformed. See also: :py:class:`monai.transforms.compose.MapTransform` applied_labels: Labels for applying the connected component on. If only one channel. The pixel whose value is not in this list will remain unchanged. If the data is in one-hot format, this is the channel indices to apply transform. independent: consider several labels as a whole or independent, default is `True`. Example use case would be segment label 1 is liver and label 2 is liver tumor, in that case you want this "independent" to be specified as False. connectivity: Maximum number of orthogonal hops to consider a pixel/voxel as a neighbor. Accepted values are ranging from 1 to input.ndim. If ``None``, a full connectivity of ``input.ndim`` is used.
monai/transforms/post/dictionary.py
__init__
dzenanz/MONAI
3
python
def __init__(self, keys: KeysCollection, applied_labels: Union[(Sequence[int], int)], independent: bool=True, connectivity: Optional[int]=None) -> None: '\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n applied_labels: Labels for applying the connected component on.\n If only one channel. The pixel whose value is not in this list will remain unchanged.\n If the data is in one-hot format, this is the channel indices to apply transform.\n independent: consider several labels as a whole or independent, default is `True`.\n Example use case would be segment label 1 is liver and label 2 is liver tumor, in that case\n you want this "independent" to be specified as False.\n connectivity: Maximum number of orthogonal hops to consider a pixel/voxel as a neighbor.\n Accepted values are ranging from 1 to input.ndim. If ``None``, a full\n connectivity of ``input.ndim`` is used.\n\n ' super().__init__(keys) self.converter = KeepLargestConnectedComponent(applied_labels, independent, connectivity)
def __init__(self, keys: KeysCollection, applied_labels: Union[(Sequence[int], int)], independent: bool=True, connectivity: Optional[int]=None) -> None: '\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n applied_labels: Labels for applying the connected component on.\n If only one channel. The pixel whose value is not in this list will remain unchanged.\n If the data is in one-hot format, this is the channel indices to apply transform.\n independent: consider several labels as a whole or independent, default is `True`.\n Example use case would be segment label 1 is liver and label 2 is liver tumor, in that case\n you want this "independent" to be specified as False.\n connectivity: Maximum number of orthogonal hops to consider a pixel/voxel as a neighbor.\n Accepted values are ranging from 1 to input.ndim. If ``None``, a full\n connectivity of ``input.ndim`` is used.\n\n ' super().__init__(keys) self.converter = KeepLargestConnectedComponent(applied_labels, independent, connectivity)<|docstring|>Args: keys: keys of the corresponding items to be transformed. See also: :py:class:`monai.transforms.compose.MapTransform` applied_labels: Labels for applying the connected component on. If only one channel. The pixel whose value is not in this list will remain unchanged. If the data is in one-hot format, this is the channel indices to apply transform. independent: consider several labels as a whole or independent, default is `True`. Example use case would be segment label 1 is liver and label 2 is liver tumor, in that case you want this "independent" to be specified as False. connectivity: Maximum number of orthogonal hops to consider a pixel/voxel as a neighbor. Accepted values are ranging from 1 to input.ndim. If ``None``, a full connectivity of ``input.ndim`` is used.<|endoftext|>
d18004c4ed890220d65f907a76461efe533d12402bcca9aebb3b08729f72bfda
def __init__(self, keys: KeysCollection, kernel_type: str='Laplace') -> None: '\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n kernel_type: the method applied to do edge detection, default is "Laplace".\n\n ' super().__init__(keys) self.converter = LabelToContour(kernel_type=kernel_type)
Args: keys: keys of the corresponding items to be transformed. See also: :py:class:`monai.transforms.compose.MapTransform` kernel_type: the method applied to do edge detection, default is "Laplace".
monai/transforms/post/dictionary.py
__init__
dzenanz/MONAI
3
python
def __init__(self, keys: KeysCollection, kernel_type: str='Laplace') -> None: '\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n kernel_type: the method applied to do edge detection, default is "Laplace".\n\n ' super().__init__(keys) self.converter = LabelToContour(kernel_type=kernel_type)
def __init__(self, keys: KeysCollection, kernel_type: str='Laplace') -> None: '\n Args:\n keys: keys of the corresponding items to be transformed.\n See also: :py:class:`monai.transforms.compose.MapTransform`\n kernel_type: the method applied to do edge detection, default is "Laplace".\n\n ' super().__init__(keys) self.converter = LabelToContour(kernel_type=kernel_type)<|docstring|>Args: keys: keys of the corresponding items to be transformed. See also: :py:class:`monai.transforms.compose.MapTransform` kernel_type: the method applied to do edge detection, default is "Laplace".<|endoftext|>
9acfd4326097c83c9905e26b04982c3cc0aa1af1d9b2352adf9ae5efbe9cb492
def __init__(self, keys: KeysCollection, ensemble: Callable[([Union[(Sequence[torch.Tensor], torch.Tensor)]], torch.Tensor)], output_key: Optional[str]=None) -> None: "\n Args:\n keys: keys of the corresponding items to be stack and execute ensemble.\n if only 1 key provided, suppose it's a PyTorch Tensor with data stacked on dimension `E`.\n output_key: the key to store ensemble result in the dictionary.\n ensemble: callable method to execute ensemble on specified data.\n if only 1 key provided in `keys`, `output_key` can be None and use `keys` as default.\n\n Raises:\n TypeError: When ``ensemble`` is not ``callable``.\n ValueError: When ``len(keys) > 1`` and ``output_key=None``. Incompatible values.\n\n " super().__init__(keys) if (not callable(ensemble)): raise TypeError(f'ensemble must be callable but is {type(ensemble).__name__}.') self.ensemble = ensemble if ((len(self.keys) > 1) and (output_key is None)): raise ValueError('Incompatible values: len(self.keys) > 1 and output_key=None.') self.output_key = (output_key if (output_key is not None) else self.keys[0])
Args: keys: keys of the corresponding items to be stack and execute ensemble. if only 1 key provided, suppose it's a PyTorch Tensor with data stacked on dimension `E`. output_key: the key to store ensemble result in the dictionary. ensemble: callable method to execute ensemble on specified data. if only 1 key provided in `keys`, `output_key` can be None and use `keys` as default. Raises: TypeError: When ``ensemble`` is not ``callable``. ValueError: When ``len(keys) > 1`` and ``output_key=None``. Incompatible values.
monai/transforms/post/dictionary.py
__init__
dzenanz/MONAI
3
python
def __init__(self, keys: KeysCollection, ensemble: Callable[([Union[(Sequence[torch.Tensor], torch.Tensor)]], torch.Tensor)], output_key: Optional[str]=None) -> None: "\n Args:\n keys: keys of the corresponding items to be stack and execute ensemble.\n if only 1 key provided, suppose it's a PyTorch Tensor with data stacked on dimension `E`.\n output_key: the key to store ensemble result in the dictionary.\n ensemble: callable method to execute ensemble on specified data.\n if only 1 key provided in `keys`, `output_key` can be None and use `keys` as default.\n\n Raises:\n TypeError: When ``ensemble`` is not ``callable``.\n ValueError: When ``len(keys) > 1`` and ``output_key=None``. Incompatible values.\n\n " super().__init__(keys) if (not callable(ensemble)): raise TypeError(f'ensemble must be callable but is {type(ensemble).__name__}.') self.ensemble = ensemble if ((len(self.keys) > 1) and (output_key is None)): raise ValueError('Incompatible values: len(self.keys) > 1 and output_key=None.') self.output_key = (output_key if (output_key is not None) else self.keys[0])
def __init__(self, keys: KeysCollection, ensemble: Callable[([Union[(Sequence[torch.Tensor], torch.Tensor)]], torch.Tensor)], output_key: Optional[str]=None) -> None: "\n Args:\n keys: keys of the corresponding items to be stack and execute ensemble.\n if only 1 key provided, suppose it's a PyTorch Tensor with data stacked on dimension `E`.\n output_key: the key to store ensemble result in the dictionary.\n ensemble: callable method to execute ensemble on specified data.\n if only 1 key provided in `keys`, `output_key` can be None and use `keys` as default.\n\n Raises:\n TypeError: When ``ensemble`` is not ``callable``.\n ValueError: When ``len(keys) > 1`` and ``output_key=None``. Incompatible values.\n\n " super().__init__(keys) if (not callable(ensemble)): raise TypeError(f'ensemble must be callable but is {type(ensemble).__name__}.') self.ensemble = ensemble if ((len(self.keys) > 1) and (output_key is None)): raise ValueError('Incompatible values: len(self.keys) > 1 and output_key=None.') self.output_key = (output_key if (output_key is not None) else self.keys[0])<|docstring|>Args: keys: keys of the corresponding items to be stack and execute ensemble. if only 1 key provided, suppose it's a PyTorch Tensor with data stacked on dimension `E`. output_key: the key to store ensemble result in the dictionary. ensemble: callable method to execute ensemble on specified data. if only 1 key provided in `keys`, `output_key` can be None and use `keys` as default. Raises: TypeError: When ``ensemble`` is not ``callable``. ValueError: When ``len(keys) > 1`` and ``output_key=None``. Incompatible values.<|endoftext|>
b503f318ec4a29a0a2299eaede623c720a16cc07dc1692c552aaa21b817a5caf
def __init__(self, keys: KeysCollection, output_key: Optional[str]=None, weights: Optional[Union[(Sequence[float], torch.Tensor, np.ndarray)]]=None) -> None: "\n Args:\n keys: keys of the corresponding items to be stack and execute ensemble.\n if only 1 key provided, suppose it's a PyTorch Tensor with data stacked on dimension `E`.\n output_key: the key to store ensemble result in the dictionary.\n if only 1 key provided in `keys`, `output_key` can be None and use `keys` as default.\n weights: can be a list or tuple of numbers for input data with shape: [E, B, C, H, W[, D]].\n or a Numpy ndarray or a PyTorch Tensor data.\n the `weights` will be added to input data from highest dimension, for example:\n 1. if the `weights` only has 1 dimension, it will be added to the `E` dimension of input data.\n 2. if the `weights` has 3 dimensions, it will be added to `E`, `B` and `C` dimensions.\n it's a typical practice to add weights for different classes:\n to ensemble 3 segmentation model outputs, every output has 4 channels(classes),\n so the input data shape can be: [3, B, 4, H, W, D].\n and add different `weights` for different classes, so the `weights` shape can be: [3, 1, 4].\n for example: `weights = [[[1, 2, 3, 4]], [[4, 3, 2, 1]], [[1, 1, 1, 1]]]`.\n\n " ensemble = MeanEnsemble(weights=weights) super().__init__(keys, ensemble, output_key)
Args: keys: keys of the corresponding items to be stack and execute ensemble. if only 1 key provided, suppose it's a PyTorch Tensor with data stacked on dimension `E`. output_key: the key to store ensemble result in the dictionary. if only 1 key provided in `keys`, `output_key` can be None and use `keys` as default. weights: can be a list or tuple of numbers for input data with shape: [E, B, C, H, W[, D]]. or a Numpy ndarray or a PyTorch Tensor data. the `weights` will be added to input data from highest dimension, for example: 1. if the `weights` only has 1 dimension, it will be added to the `E` dimension of input data. 2. if the `weights` has 3 dimensions, it will be added to `E`, `B` and `C` dimensions. it's a typical practice to add weights for different classes: to ensemble 3 segmentation model outputs, every output has 4 channels(classes), so the input data shape can be: [3, B, 4, H, W, D]. and add different `weights` for different classes, so the `weights` shape can be: [3, 1, 4]. for example: `weights = [[[1, 2, 3, 4]], [[4, 3, 2, 1]], [[1, 1, 1, 1]]]`.
monai/transforms/post/dictionary.py
__init__
dzenanz/MONAI
3
python
def __init__(self, keys: KeysCollection, output_key: Optional[str]=None, weights: Optional[Union[Sequence[float], torch.Tensor, np.ndarray]]=None) -> None:
    """
    Dictionary-based wrapper that averages stacked model outputs.

    Args:
        keys: keys of the corresponding items to stack and ensemble. If only
            one key is provided, its value is assumed to be a PyTorch Tensor
            already stacked along an ensemble dimension ``E``.
        output_key: key under which the ensemble result is stored; when a
            single key is given in ``keys``, this may be None and ``keys``
            is used as the default.
        weights: optional weights for input of shape ``[E, B, C, H, W[, D]]``,
            given as a sequence of numbers, a NumPy ndarray, or a PyTorch
            Tensor. Weights are applied starting from the highest dimension:
            a 1-D weight maps onto ``E``; a 3-D weight maps onto ``E``, ``B``
            and ``C``. A typical use is per-class weighting, e.g. for 3
            models with 4 output channels the input is ``[3, B, 4, H, W, D]``
            and a weight of shape ``[3, 1, 4]`` such as
            ``[[[1, 2, 3, 4]], [[4, 3, 2, 1]], [[1, 1, 1, 1]]]``.
    """
    # Delegate to the base ensemble transform with a MeanEnsemble operator.
    super().__init__(keys, MeanEnsemble(weights=weights), output_key)
d3d3c9e833c635e62f93a5a5ad023bdcf93d2a3001f6d7f62af7f98d9579e129
def __init__(self, keys: KeysCollection, output_key: Optional[str]=None, num_classes: Optional[int]=None) -> None: "\n Args:\n keys: keys of the corresponding items to be stack and execute ensemble.\n if only 1 key provided, suppose it's a PyTorch Tensor with data stacked on dimension `E`.\n output_key: the key to store ensemble result in the dictionary.\n if only 1 key provided in `keys`, `output_key` can be None and use `keys` as default.\n num_classes: if the input is single channel data instead of One-Hot, we can't get class number\n from channel, need to explicitly specify the number of classes to vote.\n\n " ensemble = VoteEnsemble(num_classes=num_classes) super().__init__(keys, ensemble, output_key)
Args: keys: keys of the corresponding items to be stack and execute ensemble. if only 1 key provided, suppose it's a PyTorch Tensor with data stacked on dimension `E`. output_key: the key to store ensemble result in the dictionary. if only 1 key provided in `keys`, `output_key` can be None and use `keys` as default. num_classes: if the input is single channel data instead of One-Hot, we can't get class number from channel, need to explicitly specify the number of classes to vote.
monai/transforms/post/dictionary.py
__init__
dzenanz/MONAI
3
python
def __init__(self, keys: KeysCollection, output_key: Optional[str]=None, num_classes: Optional[int]=None) -> None:
    """
    Dictionary-based wrapper that applies majority voting over stacked outputs.

    Args:
        keys: keys of the corresponding items to stack and ensemble. If only
            one key is provided, its value is assumed to be a PyTorch Tensor
            already stacked along an ensemble dimension ``E``.
        output_key: key under which the ensemble result is stored; when a
            single key is given in ``keys``, this may be None and ``keys``
            is used as the default.
        num_classes: number of classes to vote over. Required when the input
            is single-channel data rather than One-Hot, because the class
            count cannot be inferred from the channel dimension.
    """
    # Delegate to the base ensemble transform with a VoteEnsemble operator.
    super().__init__(keys, VoteEnsemble(num_classes=num_classes), output_key)
df321a4d212b18579bd4574d45e45d316934807a98356de69fa977c13ddc7bb7
async def fetch_worker(queue: asyncio.Queue, engine): "\n The worker task performing items added to the Engine's Queue\n\n Args:\n queue (asyncio.Queue): The Queue \n engine (BaseFetchingEngine): The engine itself\n " engine: BaseFetchingEngine register: FetcherRegister = engine.register while True: event: FetchEvent callback: Coroutine (event, callback) = (await queue.get()) try: fetcher = register.get_fetcher_for_event(event) async with fetcher: res = (await fetcher.fetch()) data = (await fetcher.process(res)) try: (await callback(data)) except Exception as err: logger.exception(f'Fetcher callback - {callback} failed') (await engine._on_failure(err, event)) except Exception as err: logger.exception('Failed to process fetch event') (await engine._on_failure(err, event)) finally: queue.task_done()
The worker task performing items added to the Engine's Queue Args: queue (asyncio.Queue): The Queue engine (BaseFetchingEngine): The engine itself
opal_common/fetcher/engine/fetch_worker.py
fetch_worker
pujan14/opal
367
python
async def fetch_worker(queue: asyncio.Queue, engine):
    """
    Consume fetch events from the engine's queue forever.

    For every ``(event, callback)`` pair taken from the queue: look up the
    fetcher registered for the event, run its fetch/process cycle inside the
    fetcher's async context, and pass the processed data to the callback.
    Failures — whether in fetching/processing or inside the callback itself —
    are logged and reported to the engine via ``engine._on_failure``; the
    queue item is always marked done so ``queue.join()`` can make progress.

    Args:
        queue (asyncio.Queue): queue of (FetchEvent, callback) pairs
        engine (BaseFetchingEngine): the engine owning this worker
    """
    engine: BaseFetchingEngine
    register: FetcherRegister = engine.register
    while True:
        event: FetchEvent
        callback: Coroutine
        event, callback = await queue.get()
        try:
            fetcher = register.get_fetcher_for_event(event)
            async with fetcher:
                raw_result = await fetcher.fetch()
                processed = await fetcher.process(raw_result)
            # Callback errors are handled separately so they are attributed
            # to the callback rather than to the fetch itself.
            try:
                await callback(processed)
            except Exception as err:
                logger.exception(f'Fetcher callback - {callback} failed')
                await engine._on_failure(err, event)
        except Exception as err:
            logger.exception('Failed to process fetch event')
            await engine._on_failure(err, event)
        finally:
            queue.task_done()
1e500ec44d8f21e2a8020df3635795acaa570f6e210edaca5a2e596fdebd92a5
def object_link(self, obj): 'Returns the admin link to the log entry object if it exists.' admin_url = (None if obj.is_deletion() else obj.get_admin_url()) if admin_url: return format_html('<a href="{}">{}</a>', admin_url, obj.object_repr) else: return obj.object_repr
Returns the admin link to the log entry object if it exists.
radical_translations/utils/admin.py
object_link
kingsdigitallab/radical_translations
3
python
def object_link(self, obj):
    """Return an HTML link to the log entry's object admin page, or its plain repr.

    Deleted objects (and objects without an admin URL) cannot be linked, so
    only their ``object_repr`` text is returned.
    """
    if obj.is_deletion():
        admin_url = None
    else:
        admin_url = obj.get_admin_url()
    if not admin_url:
        return obj.object_repr
    return format_html('<a href="{}">{}</a>', admin_url, obj.object_repr)