hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ebe90d3ec965171aab1f9c6a19ec66adb12fe860 | 2,481 | py | Python | main.py | Servuc/Concoction | 901d0462b8b46989140b6c0007eae324b445a51c | [
"MIT"
] | null | null | null | main.py | Servuc/Concoction | 901d0462b8b46989140b6c0007eae324b445a51c | [
"MIT"
] | null | null | null | main.py | Servuc/Concoction | 901d0462b8b46989140b6c0007eae324b445a51c | [
"MIT"
] | null | null | null | from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from urlparse import urlparse, parse_qs
import argparse
import concoction
class WebServer(BaseHTTPRequestHandler):
def _set_headers(self):
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
def do_GET(self):
self._set_headers()
if self.path[:9] != "/?recipe=":
self.wfile.write("You must give recipe parameter")
else:
query_components = parse_qs(urlparse(self.path).query)
if "recipe" not in query_components:
self.wfile.write("You must give recipe parameter")
self.wfile.write(concoction.Concoction().process(map(lambda x: x, str(query_components["recipe"]))))
def run(server_class=HTTPServer, handler_class=WebServer, port=80, verbose=False):
server_address = ('', port)
httpd = server_class(server_address, handler_class)
if verbose:
print 'Starting httpd...'
httpd.serve_forever()
def parse_args():
# Parsing args
parser = argparse.ArgumentParser(description="Generate a Chef program")
main_group = parser.add_mutually_exclusive_group()
group_file = main_group.add_argument_group()
group = group_file.add_mutually_exclusive_group()
group.add_argument("-s", "--string", action="store", type=str, help="Set string as input", default="")
group.add_argument("-f", "--file", action="store", type=str, help="Set file as input")
group_file.add_argument("-o", "--out", action="store", type=str, help="Set file as output")
main_group.add_argument("-p", "--port", action="store", type=int, help="Start as web server", default=-1)
parser.add_argument("-v", "--verbose", action="store_true", help="Allow verbose")
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
if args.port != -1:
run(port=args.port,verbose=args.verbose)
else:
my_concoction = concoction.Concoction(args.verbose)
my_output_file = "concoction.chef"
if args.out is not None:
my_output_file = args.out
my_input_text = ""
if args.string is not None and len(args.string) != 0:
my_input_text = args.string
else:
if args.file is not None:
my_input_text = my_concoction.read_file(args.file)
my_concoction.write_file(my_output_file,my_concoction.process(my_input_text))
| 35.442857 | 112 | 0.666264 | 321 | 2,481 | 4.928349 | 0.327103 | 0.041719 | 0.040455 | 0.034134 | 0.143489 | 0.105563 | 0.08976 | 0.08976 | 0 | 0 | 0 | 0.004582 | 0.208384 | 2,481 | 69 | 113 | 35.956522 | 0.800917 | 0.004837 | 0 | 0.098039 | 0 | 0 | 0.132144 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.078431 | null | null | 0.019608 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ebeeb805904a79938cde91417b1c02cbe5f1a69d | 592 | py | Python | Competitive_Programming/Stepping_stones_3.py | varshakancham/Data_Structure_n_Algorithms | aa7bedaf8ef01f8704768174cc639fae0ded0616 | [
"MIT"
] | 125 | 2018-10-19T04:56:03.000Z | 2022-03-29T19:48:27.000Z | Competitive_Programming/Stepping_stones_3.py | varshakancham/Data_Structure_n_Algorithms | aa7bedaf8ef01f8704768174cc639fae0ded0616 | [
"MIT"
] | 14 | 2018-10-22T17:50:48.000Z | 2020-10-03T07:55:07.000Z | Competitive_Programming/Stepping_stones_3.py | varshakancham/Data_Structure_n_Algorithms | aa7bedaf8ef01f8704768174cc639fae0ded0616 | [
"MIT"
] | 40 | 2018-10-21T12:35:50.000Z | 2022-03-27T07:06:13.000Z | """
Vasu is running up a stone staircase with N stones, and can hop(jump) either 1 step, 2 steps or 3 steps at a time.
You have to count, how many possible ways Vasu can run up to the stone stairs.
Input Format:
Input contains integer N that is number of steps
Constraints:
1<= N <=30
Output Format:
Output for each integer N the no of possible ways w.
"""
def hop(N) :
if (N == 1 or N == 0) :
return 1
elif (N == 2) :
return 2
else :
return hop(N - 3) + hop(N - 2) + hop(N - 1)
N = int(input())
print(hop(N))
| 20.413793 | 115 | 0.581081 | 102 | 592 | 3.372549 | 0.54902 | 0.05814 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034913 | 0.322635 | 592 | 28 | 116 | 21.142857 | 0.822943 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.111111 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ebf6761527c67a0c02c00d30530607069e7a51f6 | 22,473 | py | Python | src/spider/spiders.py | jsrgqinbin/ok_ip_proxy_pool | 383523f169845903504ba6549e7d4fb6fc36f20c | [
"MIT"
] | null | null | null | src/spider/spiders.py | jsrgqinbin/ok_ip_proxy_pool | 383523f169845903504ba6549e7d4fb6fc36f20c | [
"MIT"
] | null | null | null | src/spider/spiders.py | jsrgqinbin/ok_ip_proxy_pool | 383523f169845903504ba6549e7d4fb6fc36f20c | [
"MIT"
] | null | null | null | import asyncio
import random
import re
from abc import ABC
from typing import List, Iterable
from src.entity.proxy_entity import ProxyEntity
from src.enum.common import ProxyCoverEnum, ProxyTypeEnum
from src.log.logger import logger
from src.spider.abs_spider import AbsSpider
from bs4 import BeautifulSoup, Tag
from pyppeteer import launch
spider_collection = {}
def spider_register(cls):
spider_collection.update({cls.__name__: cls()})
logger.info(f'注册{cls.__name__}')
return cls
@spider_register
class SpiderSpysOneIP(AbsSpider, ABC):
"""
spys.one
http://spys.one/proxys
"""
def __init__(self) -> None:
super().__init__('spys.one IP代理爬虫')
async def _scrape(self):
addr_re = r'\d{2,3}\.\d{2,3}\.\d{2,3}\.\d{2,3}'
addr_port_re = addr_re + r':\d{2,5}'
countries = ['US', 'UK', 'DE', 'JP']
browser = await launch(
headless=False,
handleSIGINT=False,
handleSIGTERM=False,
handleSIGHUP=False
)
page = await browser.newPage()
if countries is None:
await page.goto(self.get_urls()[0], {
"waitLoad": True,
"waitNetworkIdle": True
})
await asyncio.sleep(random.uniform(2, 3))
countries = [
await page.evaluate('(ele) => ele.innerText', ele) for ele in await
page.xpath('//a[@href]//*[@class="spy6"]//*[@class="spy4"]')
]
res = []
for country in countries:
result = []
logger.info(f"Extracting proxies from Spys {country}.")
page_url = self.get_urls()[0]
await page.goto(f'{page_url}{country}', {
"waitLoad": True,
"waitNetworkIdle": True
})
await asyncio.sleep(self.get_interval())
table_rows = [
await page.evaluate('(ele) => ele.innerText', ele)
for ele in await page.xpath('//*[contains(@class,"spy1x")]')
]
for row_data in table_rows:
if "HTTP" in row_data or "SOCK" in row_data:
proxy_matches = row_data.split("\t")
protocol = proxy_matches[1]
if "SOCKS5" in protocol:
continue
if "HTTPS" in protocol:
protocol = 'https'
elif "HTTP" in protocol:
protocol = 'http'
else:
continue
ip_port = proxy_matches[0]
proxy_cover = proxy_matches[2]
region = proxy_matches[3]
result.append(ProxyEntity(f'{protocol}://{ip_port}',
source=self._name,
proxy_type=self._judge_proxy_type(protocol),
proxy_cover=self._judge_proxy_cover(proxy_cover),
region=region))
res.extend(result)
await asyncio.sleep(self.get_interval())
print(f"Extracted {len(result)} total proxies from Spys One.")
await browser.close()
return res
async def crawl(self):
logger.info(f'{self._name}开始爬取...')
logger.info(f'_scrape_free_proxy_list_net 开始运行...')
result = await self._scrape()
return result
def get_urls(self) -> List[str]:
return ['http://spys.one/free-proxy-list/']
# 爬太快会被封
def get_interval(self) -> int:
return 2
def get_page_url(self, url, page) -> str:
return url
def get_encoding(self) -> str:
return 'utf-8'
@staticmethod
def _judge_proxy_type(type_str: str):
type_low = type_str.lower()
if type_low == 'http':
return ProxyTypeEnum.HTTP.value
elif type_low == 'https':
return ProxyTypeEnum.HTTPS.value
else:
return ProxyTypeEnum.UNKNOWN.value
@staticmethod
def _judge_proxy_cover(cover_str: str):
if cover_str == 'HIA':
return ProxyCoverEnum.HIGH_COVER.value
elif cover_str == 'ANM':
return ProxyCoverEnum.NORMAL_COVER.value
else:
return ProxyCoverEnum.UNKNOWN.value
@spider_register
class SpiderFreeProxyListIP(AbsSpider, ABC):
"""
free-proxy-list
https://free-proxy-list.net
"""
def __init__(self) -> None:
super().__init__('free-proxy-list IP代理爬虫')
async def _scrape(self):
browser = await launch(
headless=False,
handleSIGINT=False,
handleSIGTERM=False,
handleSIGHUP=False
)
page = await browser.newPage()
await page.goto(self.get_urls()[0], {
"waitLoad": True,
"waitNetworkIdle": True
})
res = []
while True:
result = []
await asyncio.sleep(self.get_interval())
col_names = [
await page.evaluate('(ele) => ele.innerText.toLowerCase()', ele)
for ele in await page.xpath('//*[@id="proxylisttable"]/thead/*[@role="row"]//*[@aria-label]')
]
for row in await page.xpath('//*[@id="proxylisttable"]/tbody/*[@role="row"]'):
col_values = [
await page.evaluate('(ele) => ele.innerText', ele)
for ele in await row.xpath('./td')
]
row_data = dict(zip(col_names, col_values))
protocol = 'http'
if row_data['https'] == 'yes':
protocol = 'https'
ip = row_data['ip address\t'].replace("\t", '')
port = row_data['port\t'].replace("\t", '')
proxy_cover = row_data['anonymity\t'].replace("\t", '')
region = row_data['code\t'].replace("\t", '')
result.append(ProxyEntity(f'{protocol}://{ip}:{port}',
source=self._name,
proxy_type=self._judge_proxy_type(protocol),
proxy_cover=self._judge_proxy_cover(proxy_cover),
region=region))
res.extend(result)
next_button_ele = await page.xpath('//*[@class="fg-button ui-button ui-state-default next"]')
if next_button_ele:
await next_button_ele[0].click()
else:
await browser.close()
logger.error(f"Extracted {len(result)} total proxies from free-proxy-list.net")
return res
async def crawl(self):
logger.info(f'{self._name}开始爬取...')
logger.info(f'_scrape_free_proxy_list_net 开始运行...')
result = await self._scrape()
return result
def get_urls(self) -> List[str]:
return ['https://free-proxy-list.net']
# 爬太快会被封
def get_interval(self) -> int:
return 2
def get_page_url(self, url, page) -> str:
return url
def get_encoding(self) -> str:
return 'utf-8'
@staticmethod
def _judge_proxy_type(type_str: str):
type_low = type_str.lower()
if type_low == 'http':
return ProxyTypeEnum.HTTP.value
elif type_low == 'https':
return ProxyTypeEnum.HTTPS.value
else:
return ProxyTypeEnum.UNKNOWN.value
@staticmethod
def _judge_proxy_cover(cover_str: str):
if cover_str == 'elite proxy':
return ProxyCoverEnum.HIGH_COVER.value
elif cover_str == 'anonymous':
return ProxyCoverEnum.NORMAL_COVER.value
else:
return ProxyCoverEnum.UNKNOWN.value
@spider_register
class Spider66Ip(AbsSpider):
"""
66IP代理爬虫 刷新速度:🐌慢
http://www.66ip.cn/
"""
def __init__(self) -> None:
super().__init__('66IP代理爬虫')
def do_crawl(self, resp) -> List[ProxyEntity]:
result = []
soup = BeautifulSoup(resp, 'lxml')
tr_list = soup.find('table', attrs={'width': '100%', 'bordercolor': '#6699ff'}).find_all('tr')
for i, tr in enumerate(tr_list):
if i == 0:
continue
contents = tr.contents
ip = contents[0].text
port = contents[1].text
region = contents[2].text
proxy_cover = contents[3].text
result.append(ProxyEntity(f'http://{ip}:{port}',
source=self._name,
proxy_cover=self._judge_proxy_cover(proxy_cover),
region=region))
return result
def get_urls(self) -> List[str]:
return ['http://www.66ip.cn']
def get_page_range(self) -> Iterable:
return range(1, 6)
def get_page_url(self, url, page) -> str:
return f'{url}/{page}.html'
def get_encoding(self) -> str:
return 'gb2312'
@staticmethod
def _judge_proxy_cover(cover_str: str):
if cover_str == '高匿代理':
return ProxyCoverEnum.HIGH_COVER.value
else:
return ProxyCoverEnum.UNKNOWN.value
@spider_register
class SpiderQuanWangIp(AbsSpider):
"""
全网IP代理爬虫 刷新速度:极快
http://www.goubanjia.com/
"""
def __init__(self) -> None:
super().__init__('全网IP代理爬虫')
def do_crawl(self, resp) -> List[ProxyEntity]:
result = []
soup = BeautifulSoup(resp, 'lxml')
tr_list = soup.find('tbody').find_all('tr')
for i, tr in enumerate(tr_list):
tds = tr.find_all('td')
id_and_port = tds[0]
ip, port = self._parse_ip_and_port(id_and_port)
proxy_cover = tds[1].text
proxy_type = tds[2].text
region = tds[3].contents[1].text
supplier = tds[4].text
result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
source=self._name,
supplier=supplier,
proxy_type=self._judge_proxy_type(proxy_type),
proxy_cover=self._judge_proxy_cover(proxy_cover),
region=region
)
)
return result
def get_urls(self) -> List[str]:
return ['http://www.goubanjia.com']
def get_page_url(self, url, page) -> str:
return url
def _parse_ip_and_port(self, ip_td: Tag):
res = []
contents = ip_td.find_all(['div', 'span'])
for content in contents:
res.append(content.text)
res.pop()
ip = ''.join(res)
port_tag = contents[-1]
port_ori_str = port_tag.get('class')[1]
# 解码真实的端口
port = 0
for c in port_ori_str:
port *= 10
port += (ord(c) - ord('A'))
port /= 8
port = int(port)
return ip, str(port)
def _judge_proxy_type(self, type_str: str):
type_low = type_str.lower()
if type_low == 'http':
return ProxyTypeEnum.HTTP.value
elif type_low == 'https':
return ProxyTypeEnum.HTTPS.value
else:
return ProxyTypeEnum.UNKNOWN.value
def _judge_proxy_cover(self, cover_str: str):
if cover_str == '透明':
return ProxyCoverEnum.TRANSPARENT.value
elif cover_str == '高匿':
return ProxyCoverEnum.HIGH_COVER.value
else:
return ProxyCoverEnum.UNKNOWN.value
@spider_register
class SpiderXiciIp(AbsSpider):
"""
西刺代理爬虫 刷新速度:🐌慢
基本上没几个代理个能用🆒
https://www.xicidaili.com/
"""
def __init__(self) -> None:
super().__init__('西刺IP代理爬虫')
def do_crawl(self, resp) -> List[ProxyEntity]:
result = []
soup = BeautifulSoup(resp, 'lxml')
tab = soup.find('table', attrs={'id': 'ip_list'})
if tab is None:
return []
tr_list = tab.find_all('tr')[1: -1]
for tr in tr_list:
tds = tr.find_all('td')
ip = tds[1].text
port = tds[2].text
proxy_cover = tds[4].text
proxy_type = tds[5].text
result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
source=self._name,
proxy_cover=self._judge_proxy_cover(proxy_cover),
proxy_type=self._judge_proxy_type(proxy_type),
))
return result
def get_urls(self) -> List[str]:
return [
'https://www.xicidaili.com/nn', # 高匿
'https://www.xicidaili.com/nt' # 透明
]
def get_page_range(self) -> Iterable:
return range(1, 3)
@staticmethod
def _judge_proxy_cover(cover_str: str):
if cover_str == '高匿':
return ProxyCoverEnum.HIGH_COVER.value
if cover_str == '透明':
return ProxyCoverEnum.TRANSPARENT.value
else:
return ProxyCoverEnum.UNKNOWN.value
@staticmethod
def _judge_proxy_type(type_str: str):
if type_str == 'HTTPS':
return ProxyTypeEnum.HTTPS.value
if type_str == 'HTTP':
return ProxyTypeEnum.HTTP.value
else:
return ProxyTypeEnum.UNKNOWN.value
@spider_register
class SpiderKuaiDaiLiIp(AbsSpider):
"""
快代理IP 刷新速度: 极快
https://www.kuaidaili.com/free
"""
def __init__(self) -> None:
super().__init__('快代理IP代理爬虫')
def do_crawl(self, resp) -> List[ProxyEntity]:
result = []
soup = BeautifulSoup(resp, 'lxml')
trs = soup.find('table').find('tbody').find_all('tr')
for tr in trs:
tds = tr.find_all('td')
ip = tds[0].text
port = tds[1].text
proxy_cover = tds[2].text
proxy_type = tds[3].text
region = tds[4].text
result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
# ip, port, protocol=proxy_type.lower(),
source=self._name,
proxy_type=self._judge_proxy_type(proxy_type),
proxy_cover=self._judge_proxy_cover(proxy_cover),
region=region))
return result
def get_urls(self) -> List[str]:
return [
'https://www.kuaidaili.com/free/inha', # 高匿
'https://www.kuaidaili.com/free/intr' # 透明
]
def get_page_range(self) -> Iterable:
return range(1, 3)
# 爬太快会被封
def get_interval(self) -> int:
return 3
def _judge_proxy_type(self, type_str: str):
type_low = type_str.lower()
if type_low == 'http':
return ProxyTypeEnum.HTTP.value
elif type_low == 'https':
return ProxyTypeEnum.HTTPS.value
else:
return ProxyTypeEnum.UNKNOWN.value
def _judge_proxy_cover(self, cover_str: str):
if cover_str == '透明':
return ProxyCoverEnum.TRANSPARENT.value
elif cover_str == '高匿名':
return ProxyCoverEnum.HIGH_COVER.value
else:
return ProxyCoverEnum.UNKNOWN.value
@spider_register
class SpiderYunDaiLiIp(AbsSpider):
"""
云代理IP 刷新速度: 快
http://www.ip3366.net/free
"""
def __init__(self) -> None:
super().__init__('云代理IP爬虫')
def do_crawl(self, resp) -> List[ProxyEntity]:
result = []
soup = BeautifulSoup(resp, 'lxml')
trs = soup.find('table').find('tbody').find_all('tr')
for tr in trs:
tds = tr.find_all('td')
ip = tds[0].text
port = tds[1].text
proxy_cover = tds[2].text
proxy_type = tds[3].text
region = tds[4].text
result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
source=self._name,
proxy_type=self._judge_proxy_type(proxy_type),
proxy_cover=self._judge_proxy_cover(proxy_cover),
region=region))
return result
def get_urls(self) -> List[str]:
return [
'http://www.ip3366.net/free/?stype=1', # 高匿
'http://www.ip3366.net/free/?stype=2' # 透明 or 普匿
]
def get_page_range(self) -> Iterable:
return range(1, 3)
def get_page_url(self, url, page) -> str:
return f'{url}&page={page}'
def _judge_proxy_type(self, type_str: str):
type_low = type_str.lower()
if type_low == 'http':
return ProxyTypeEnum.HTTP.value
elif type_low == 'https':
return ProxyTypeEnum.HTTPS.value
else:
return ProxyTypeEnum.UNKNOWN.value
def _judge_proxy_cover(self, cover_str: str):
if cover_str == '透明代理IP':
return ProxyCoverEnum.TRANSPARENT.value
elif cover_str == '高匿代理IP':
return ProxyCoverEnum.HIGH_COVER.value
elif cover_str == '普通代理IP':
return ProxyCoverEnum.NORMAL_COVER.value
else:
return ProxyCoverEnum.UNKNOWN.value
@spider_register
class SpiderIpHaiIp(AbsSpider):
"""
IP海代理IP 刷新速度: 8分钟/1个
有时会连不上
http://www.iphai.com
"""
def __init__(self) -> None:
super().__init__('IP海代理IP爬虫')
def do_crawl(self, resp) -> List[ProxyEntity]:
result = []
soup = BeautifulSoup(resp, 'lxml')
table = soup.find('table')
if table is None:
return []
tbody = soup.find('tbody')
if tbody is None:
return []
trs = tbody.find_all('tr')
for i, tr in enumerate(trs):
if i == 0:
continue
tds = tr.find_all('td')
ip = tds[0].text
port = tds[1].text
proxy_cover = tds[2].text
proxy_type = tds[3].text if tds[3].text != '' else 'http'
region = tds[4].text
result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
source=self._name,
proxy_type=self._judge_proxy_type(proxy_type),
proxy_cover=self._judge_proxy_cover(proxy_cover),
region=region))
return result
def get_urls(self) -> List[str]:
return [
'http://www.iphai.com/free/ng', # 国内高匿
'http://www.iphai.com/free/np', # 国内普通
'http://www.iphai.com/free/wg', # 国外高匿
'http://www.iphai.com/free/wp', # 国外普通
]
# 爬太快会被封
def get_interval(self) -> int:
return 2
def get_page_url(self, url, page) -> str:
return url
@staticmethod
def _judge_proxy_type(type_str: str):
type_low = type_str.lower()
if type_low == 'http':
return ProxyTypeEnum.HTTP.value
elif type_low == 'https':
return ProxyTypeEnum.HTTPS.value
else:
return ProxyTypeEnum.UNKNOWN.value
@staticmethod
def _judge_proxy_cover(cover_str: str):
if cover_str == '透明':
return ProxyCoverEnum.TRANSPARENT.value
elif cover_str == '高匿':
return ProxyCoverEnum.HIGH_COVER.value
elif cover_str == '普匿':
return ProxyCoverEnum.NORMAL_COVER.value
else:
return ProxyCoverEnum.UNKNOWN.value
@spider_register
class SpiderMianFeiDaiLiIp(AbsSpider):
"""
免费代理IP库
http://ip.jiangxianli.com/
"""
def __init__(self) -> None:
super().__init__('免费代理IP爬虫')
def do_crawl(self, resp) -> List[ProxyEntity]:
result = []
soup = BeautifulSoup(resp, 'lxml')
table = soup.find('table')
if table is None:
return []
tbody = soup.find('tbody')
if tbody is None:
return []
trs = tbody.find_all('tr')
for i, tr in enumerate(trs):
if i == 0:
continue
tds = tr.find_all('td')
logger.info('免费代理IP爬虫 -- ' + tds)
ip = tds[0].text
port = tds[1].text
proxy_cover = tds[2].text
proxy_type = tds[3].text if tds[2].text != '' else 'http'
region = tds[4].text
supplier = tds[5].text
result.append(ProxyEntity(f'{proxy_type.lower()}://{ip}:{port}',
source=self._name,
supplier=supplier,
proxy_type=self._judge_proxy_type(proxy_type),
proxy_cover=self._judge_proxy_cover(proxy_cover),
region=region))
return result
def get_interval(self) -> int:
return 2
def get_page_range(self) -> Iterable:
return range(1, 4)
def get_urls(self) -> List[str]:
return ['http://ip.jiangxianli.com/?page={}']
def get_page_url(self, url, page) -> str:
return url.format(page)
@staticmethod
def _judge_proxy_type(type_str: str):
type_low = type_str.lower()
if type_low == 'http':
return ProxyTypeEnum.HTTP.value
elif type_low == 'https':
return ProxyTypeEnum.HTTPS.value
else:
return ProxyTypeEnum.UNKNOWN.value
@staticmethod
def _judge_proxy_cover(cover_str: str):
if cover_str == '透明':
return ProxyCoverEnum.TRANSPARENT.value
elif cover_str == '高匿':
return ProxyCoverEnum.HIGH_COVER.value
elif cover_str == '普匿':
return ProxyCoverEnum.NORMAL_COVER.value
else:
return ProxyCoverEnum.UNKNOWN.value
if __name__ == '__main__':
# proxies = []
# tasks = [SpiderXiciIp().crawl()]
# loop = asyncio.new_event_loop()
# asyncio.set_event_loop(loop)
# results = loop.run_until_complete(asyncio.gather(*tasks))
# loop.close()
results = asyncio.run(SpiderSpysOneIP().crawl())
print(results)
| 32.71179 | 109 | 0.528679 | 2,454 | 22,473 | 4.638957 | 0.118582 | 0.039529 | 0.023718 | 0.024157 | 0.744993 | 0.709768 | 0.682273 | 0.633784 | 0.604972 | 0.597505 | 0 | 0.008753 | 0.354381 | 22,473 | 686 | 110 | 32.759475 | 0.775656 | 0.030214 | 0 | 0.6875 | 0 | 0.001838 | 0.094429 | 0.026351 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117647 | false | 0 | 0.020221 | 0.053309 | 0.338235 | 0.003676 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ebf7f2e9fb0ee5252b7a620b65b9ce10eb8813c4 | 1,418 | py | Python | v1/confirmation_blocks/migrations/0001_initial.py | Kenan7/Bank | 45e2a558dd725dcc9c9995e03dbc0d02221b3710 | [
"MIT"
] | 2 | 2021-02-26T03:20:02.000Z | 2021-05-21T14:18:56.000Z | v1/confirmation_blocks/migrations/0001_initial.py | shahraizali/Bank | d9cb926dd51af91e00199444b523f97c1b5fa3c7 | [
"MIT"
] | null | null | null | v1/confirmation_blocks/migrations/0001_initial.py | shahraizali/Bank | d9cb926dd51af91e00199444b523f97c1b5fa3c7 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.6 on 2020-07-14 02:47
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
('blocks', '0001_initial'),
('validators', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ConfirmationBlock',
fields=[
('created_date', models.DateTimeField(auto_now_add=True, db_index=True, null=True)),
('modified_date', models.DateTimeField(auto_now=True, db_index=True)),
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('block_identifier', models.CharField(max_length=64)),
('block', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='confirmation_blocks', to='blocks.Block')),
('validator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='confirmation_blocks', to='validators.Validator')),
],
options={
'default_related_name': 'confirmation_blocks',
},
),
migrations.AddConstraint(
model_name='confirmationblock',
constraint=models.UniqueConstraint(fields=('block', 'validator'), name='unique_block_validator'),
),
]
| 38.324324 | 157 | 0.62976 | 142 | 1,418 | 6.119718 | 0.485915 | 0.036824 | 0.048331 | 0.075949 | 0.262371 | 0.193326 | 0.193326 | 0.193326 | 0.193326 | 0.193326 | 0 | 0.024141 | 0.24048 | 1,418 | 36 | 158 | 39.388889 | 0.78273 | 0.031735 | 0 | 0.068966 | 1 | 0 | 0.201313 | 0.016047 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.103448 | 0 | 0.241379 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ebf8eea63d882c0577cd9961caae7db6fccc2e2e | 747 | py | Python | main.py | Karaaslan29/gooz-operating-system-esp32 | 044300251027827af50f47b53f5d4a3e7304d535 | [
"MIT"
] | 2 | 2021-12-11T23:50:48.000Z | 2021-12-20T11:21:25.000Z | main.py | Karaaslan29/gooz-operating-system-esp32 | 044300251027827af50f47b53f5d4a3e7304d535 | [
"MIT"
] | null | null | null | main.py | Karaaslan29/gooz-operating-system-esp32 | 044300251027827af50f47b53f5d4a3e7304d535 | [
"MIT"
] | 1 | 2021-12-11T23:50:50.000Z | 2021-12-11T23:50:50.000Z | import gooz_basic
import dev.gooz_thread
import os
username = "Gorkem"
password = "1234"
login_flag = False
print("Welcome to GoozOS")
usr = input("Username: ")
if usr == username:
paswd = input("Password: ")
if paswd == password:
login_flag = True
else:
print("Wrong Password")
else:
print("User not found")
while(login_flag):
print(username+"@RPi_Pico:",end="")
print(os.getcwd(),end=" ")
msg = input(">> ")
cmd_list = gooz_basic.command_analyzator(msg)
gooz_basic.add_run_commands(cmd_list)
gooz_basic.history.append(cmd_list)
if cmd_list[0] == "shutdown":
os.chdir("/")
print("System will be shutdown")
dev.gooz_thread.exit_flag = 1
break
| 20.189189 | 49 | 0.630522 | 97 | 747 | 4.670103 | 0.525773 | 0.07947 | 0.057395 | 0.07064 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010471 | 0.232932 | 747 | 36 | 50 | 20.75 | 0.780105 | 0 | 0 | 0.071429 | 0 | 0 | 0.162416 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.142857 | 0.107143 | 0 | 0.107143 | 0.214286 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
230dc25584b2262a1807edf8315b2ffc32a31b8f | 430 | py | Python | src/nti/zodb/schema.py | NextThought/nti.zodb | 2eb56b6a617fe83c738bfc4651b31d080856e3fc | [
"Apache-2.0"
] | null | null | null | src/nti/zodb/schema.py | NextThought/nti.zodb | 2eb56b6a617fe83c738bfc4651b31d080856e3fc | [
"Apache-2.0"
] | 10 | 2017-06-08T12:24:31.000Z | 2021-04-01T16:52:38.000Z | src/nti/zodb/schema.py | NextThought/nti.zodb | 2eb56b6a617fe83c738bfc4651b31d080856e3fc | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Deprecated, do not use.
"""
from __future__ import print_function, absolute_import, division
__docformat__ = "restructuredtext en"
logger = __import__('logging').getLogger(__name__)
import zope.deferredimport
zope.deferredimport.initialize()
zope.deferredimport.deprecatedFrom(
"Moved to nti.schema.field",
"nti.schema.field",
"FieldValidationMixin",
"Number")
| 22.631579 | 64 | 0.734884 | 46 | 430 | 6.478261 | 0.782609 | 0.181208 | 0.09396 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002667 | 0.127907 | 430 | 18 | 65 | 23.888889 | 0.792 | 0.153488 | 0 | 0 | 0 | 0 | 0.261972 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0.1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
230feb929ca3772da2f09e132d21e4c7017612de | 766 | py | Python | user/collection/manager/insert_one.py | dsvalenciah/ROAp | 24cbff0e719c5009ec1f1e7190924d4d9297e992 | [
"MIT"
] | 4 | 2018-04-23T00:04:01.000Z | 2018-10-28T22:56:51.000Z | user/collection/manager/insert_one.py | dsvalenciah/ROAp | 24cbff0e719c5009ec1f1e7190924d4d9297e992 | [
"MIT"
] | 23 | 2017-12-22T08:27:35.000Z | 2021-12-13T19:57:35.000Z | user/collection/manager/insert_one.py | dsvalenciah/ROAp | 24cbff0e719c5009ec1f1e7190924d4d9297e992 | [
"MIT"
] | 1 | 2020-06-03T02:07:26.000Z | 2020-06-03T02:07:26.000Z |
from manager.exceptions.user import UserSchemaError, UserDuplicateEmailError
from manager.schemas.user import User
def insert_one(db_client, user, language):
    """Insert a user document, rejecting duplicate e-mail addresses.

    Raises UserDuplicateEmailError when a user with the same e-mail
    already exists, and UserSchemaError when the payload fails schema
    serialization.  Returns the new document's inserted id.
    """
    # TODO: validate password and initial schema
    # TODO: add rq for process email sending
    # TODO: add resource for re try email sending if it fails
    _ = language  # gettext-style translator for user-facing messages
    duplicate = db_client.users.find_one({'email': user.get('email')})
    if duplicate:
        raise UserDuplicateEmailError(
            _('User with specified email already exist.')
        )
    serialized, errors = User().dump(user)
    if errors:
        raise UserSchemaError(errors)
    return db_client.users.insert_one(serialized).inserted_id
| 26.413793 | 76 | 0.689295 | 94 | 766 | 5.457447 | 0.5 | 0.046784 | 0.05848 | 0.077973 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.22846 | 766 | 28 | 77 | 27.357143 | 0.86802 | 0.197128 | 0 | 0 | 0 | 0 | 0.082508 | 0 | 0 | 0 | 0 | 0.035714 | 0 | 1 | 0.0625 | false | 0 | 0.125 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2317ef8bf191670555aac3e747f140eb977cdda5 | 220 | py | Python | extract_txt.py | psuresh21/real-time_weather_status | 7891222a17a8cdb3c530d31c08be1e5d00c7eb4d | [
"MIT"
] | null | null | null | extract_txt.py | psuresh21/real-time_weather_status | 7891222a17a8cdb3c530d31c08be1e5d00c7eb4d | [
"MIT"
] | null | null | null | extract_txt.py | psuresh21/real-time_weather_status | 7891222a17a8cdb3c530d31c08be1e5d00c7eb4d | [
"MIT"
] | null | null | null | import re
def ext_txt(on_time_weather, mains):
    """Append weather text values scraped from HTML-ish lines.

    For every line in *mains* that contains "span", strip the markup
    tokens and append the second '>'-separated field to the
    *on_time_weather* list (mutated in place).  Other lines are ignored.
    """
    # NOTE(review): '\[A-Z][A-Z][A-Z]' matches the literal text
    # "[A-Z][A-Z][A-Z]" (first bracket escaped), followed by a digit, a
    # word character and '>'.  Presumably character classes were intended;
    # confirm against the actual scraped input before changing it.
    pattern = re.compile(r'span class|=|\[A-Z][A-Z][A-Z]\d\w>')
    for line in mains:
        if "span" not in line:
            # The original code rebound the loop variable to '' here,
            # which had no effect; non-span lines are simply skipped.
            continue
        stripped = pattern.sub('', line)
        on_time_weather.append(stripped.strip().split('>')[1])
231853c19cd9360072f2d871b60b56e941241b06 | 1,254 | py | Python | exploratory_analysis/author_scan.py | chuajiesheng/twitter-sentiment-analysis | 7617243c953a20c517a737c79fe0f54e55aef140 | [
"Apache-2.0"
] | null | null | null | exploratory_analysis/author_scan.py | chuajiesheng/twitter-sentiment-analysis | 7617243c953a20c517a737c79fe0f54e55aef140 | [
"Apache-2.0"
] | null | null | null | exploratory_analysis/author_scan.py | chuajiesheng/twitter-sentiment-analysis | 7617243c953a20c517a737c79fe0f54e55aef140 | [
"Apache-2.0"
] | null | null | null | import os
from utils import Reader
import code
import sys
def extract_authors(tweets):
    # Print one CSV row per tweet author:
    #   "id","link","preferredUsername","displayName",<post flag>,<share flag>
    # Original posts are flagged 1,0; shares/retweets 0,1.  Python 2 code
    # (print statements).
    for t in tweets:
        if t.is_post():
            actor = t.actor()
            print '"{}","{}","{}","{}",{},{}'.format(actor['id'],
                                                     actor['link'],
                                                     actor['preferredUsername'],
                                                     actor['displayName'], 1, 0)
        elif t.is_share():
            # For shares, the author of interest is the ORIGINAL tweet's
            # actor, taken from the nested 'object' payload.
            original_tweet = t.data['object']
            actor = original_tweet['actor']
            print '"{}","{}","{}","{}",{},{}'.format(actor['id'],
                                                     actor['link'],
                                                     actor['preferredUsername'],
                                                     actor['displayName'], 0, 1)
        else:
            print 'Neither post nor share:', t.id()
if __name__ == '__main__':
    # coding=utf-8
    # Python-2-only trick: reload(sys) restores setdefaultencoding so that
    # non-ASCII tweet text prints without UnicodeEncodeError.
    reload(sys)
    sys.setdefaultencoding('utf-8')
    # Run the author extraction over every file in the current directory.
    working_directory = os.getcwd()
    files = Reader.read_directory(working_directory)
    for f in files:
        extract_authors(Reader.read_file(f))
    # code.interact(local=dict(globals(), **locals()))
| 30.585366 | 80 | 0.42823 | 108 | 1,254 | 4.805556 | 0.5 | 0.05395 | 0.061657 | 0.080925 | 0.26975 | 0.26975 | 0.26975 | 0.26975 | 0.26975 | 0.26975 | 0 | 0.008357 | 0.427432 | 1,254 | 40 | 81 | 31.35 | 0.714485 | 0.048644 | 0 | 0.214286 | 0 | 0 | 0.138655 | 0.042017 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.142857 | null | null | 0.107143 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
232d06e8f8a121db8488a5d3fb023cffdbccd33f | 671 | py | Python | HotPlutonium/RunAnalyse.py | DweebsUnited/CodeMonkey | 3b27e9c189c897b06002498aea639bb44671848a | [
"BSD-3-Clause"
] | null | null | null | HotPlutonium/RunAnalyse.py | DweebsUnited/CodeMonkey | 3b27e9c189c897b06002498aea639bb44671848a | [
"BSD-3-Clause"
] | 7 | 2016-06-03T05:41:27.000Z | 2018-08-07T07:09:40.000Z | HotPlutonium/RunAnalyse.py | DweebsUnited/CodeMonkey | 3b27e9c189c897b06002498aea639bb44671848a | [
"BSD-3-Clause"
] | null | null | null | from sys import argv
from subprocess import call
if len( argv ) != 2:
print "Format: RunAnalyse.py {input.csv}"
fname = argv[ 1 ].split( '.' )
fname, fext = fname[ 0 ], fname[ 1 ]
fcsv = fname + ".csv"
fjson = fname + ".json"
ftri = fname + "Tri.json"
fobj = fname + ".obj"
if fext == ".csv":
print "Running CSV to JSON"
call( [ 'python', 'CSVtoJSON.py', fcsv, fjson ] )
print "Running triangulator"
call( [ "../TombstoneTriangulator/TombstoneTriangulator", fjson, ftri ] )
print "Copying to StrawLobster"
call( [ "cp", ftri, "../StrawLobster/data/triangulation.json" ] )
print "Generating OBJ model"
call( [ "python", "GenOBJ.py", ftri, fobj ] )
| 21.645161 | 74 | 0.639344 | 84 | 671 | 5.107143 | 0.488095 | 0.055944 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007366 | 0.19076 | 671 | 30 | 75 | 22.366667 | 0.782689 | 0 | 0 | 0 | 0 | 0 | 0.388972 | 0.126677 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.105263 | null | null | 0.263158 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2331d332a574ec4b1bf94ad40d00b1dbb4150c13 | 1,074 | py | Python | basta/migrations/0005_auto_20200419_2006.py | lorenzosp93/basta_app | 0eba51f648f74d21e84d30d1047a4bedf79dbbaa | [
"MIT"
] | 1 | 2021-03-23T19:14:27.000Z | 2021-03-23T19:14:27.000Z | basta/migrations/0005_auto_20200419_2006.py | lorenzosp93/basta_app | 0eba51f648f74d21e84d30d1047a4bedf79dbbaa | [
"MIT"
] | 4 | 2021-03-30T14:20:11.000Z | 2021-06-10T20:12:34.000Z | basta/migrations/0005_auto_20200419_2006.py | lorenzosp93/basta_app | 0eba51f648f74d21e84d30d1047a4bedf79dbbaa | [
"MIT"
] | null | null | null | # Generated by Django 3.0.5 on 2020-04-19 18:06
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: re-declare the ForeignKeys on
    Play.cur_round, Play.user and Round.session with explicit
    related_name values ('play_set' / 'round_set')."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('basta', '0004_session_slug'),
    ]
    operations = [
        migrations.AlterField(
            model_name='play',
            name='cur_round',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='play_set', to='basta.Round'),
        ),
        migrations.AlterField(
            model_name='play',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='play_set', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='round',
            name='session',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='round_set', to='basta.Session', verbose_name='Session'),
        ),
    ]
| 33.5625 | 151 | 0.648976 | 123 | 1,074 | 5.495935 | 0.373984 | 0.059172 | 0.08284 | 0.130178 | 0.442308 | 0.442308 | 0.33284 | 0.33284 | 0.33284 | 0.33284 | 0 | 0.022975 | 0.229981 | 1,074 | 31 | 152 | 34.645161 | 0.794438 | 0.041899 | 0 | 0.32 | 1 | 0 | 0.108082 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.12 | 0 | 0.24 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
233540ad3d07aec4882229dd9b758747f67076a4 | 32,981 | py | Python | ssmbase.py | Chicone/SSM-VPR | 307ef83eee33d63ee6aab4a669fad06581888c0a | [
"MIT"
] | 5 | 2020-07-12T22:24:21.000Z | 2021-08-10T09:56:32.000Z | ssmbase.py | Chicone/SSM-VPR | 307ef83eee33d63ee6aab4a669fad06581888c0a | [
"MIT"
] | null | null | null | ssmbase.py | Chicone/SSM-VPR | 307ef83eee33d63ee6aab4a669fad06581888c0a | [
"MIT"
] | 2 | 2020-06-24T08:46:13.000Z | 2021-04-17T03:36:00.000Z | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ssm.ui'
#
# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    def setupUi(self, MainWindow):
        """Create, size and style every widget of the SSM-VPR main window.

        Auto-generated by pyuic5 (5.9.2) from 'ssm.ui'; manual edits here
        are lost when the .ui file is recompiled.
        """
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1262, 783)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
        MainWindow.setSizePolicy(sizePolicy)
        MainWindow.setUnifiedTitleAndToolBarOnMac(True)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setMinimumSize(QtCore.QSize(1200, 730))
        self.centralwidget.setObjectName("centralwidget")
        # Console container group box.
        self.groupBox_7 = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox_7.setGeometry(QtCore.QRect(590, 299, 671, 411))
        self.groupBox_7.setObjectName("groupBox_7")
        # Query image preview (scrollable label).
        self.queryGroupBox = QtWidgets.QGroupBox(self.centralwidget)
        self.queryGroupBox.setGeometry(QtCore.QRect(590, 30, 224, 231))
        self.queryGroupBox.setStyleSheet("")
        self.queryGroupBox.setObjectName("queryGroupBox")
        self.scrollArea = QtWidgets.QScrollArea(self.queryGroupBox)
        self.scrollArea.setGeometry(QtCore.QRect(0, 20, 221, 211))
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.scrollArea.sizePolicy().hasHeightForWidth())
        self.scrollArea.setSizePolicy(sizePolicy)
        self.scrollArea.setWidgetResizable(True)
        self.scrollArea.setObjectName("scrollArea")
        self.scrollAreaWidgetContents = QtWidgets.QWidget()
        self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 219, 209))
        self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents")
        self.queryImageLabel = QtWidgets.QLabel(self.scrollAreaWidgetContents)
        self.queryImageLabel.setGeometry(QtCore.QRect(0, 0, 224, 224))
        self.queryImageLabel.setText("")
        self.queryImageLabel.setObjectName("queryImageLabel")
        self.scrollArea.setWidget(self.scrollAreaWidgetContents)
        # --- STAGE I settings panel: image size, method, PCA ---
        self.stage1_groupBox = QtWidgets.QGroupBox(self.centralwidget)
        self.stage1_groupBox.setEnabled(True)
        self.stage1_groupBox.setGeometry(QtCore.QRect(30, 30, 161, 411))
        self.stage1_groupBox.setContextMenuPolicy(QtCore.Qt.DefaultContextMenu)
        self.stage1_groupBox.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"\n"
"")
        self.stage1_groupBox.setObjectName("stage1_groupBox")
        self.imageSizeGroupBox_s1 = QtWidgets.QGroupBox(self.stage1_groupBox)
        self.imageSizeGroupBox_s1.setGeometry(QtCore.QRect(10, 30, 141, 101))
        self.imageSizeGroupBox_s1.setMaximumSize(QtCore.QSize(160, 16777215))
        self.imageSizeGroupBox_s1.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"")
        self.imageSizeGroupBox_s1.setObjectName("imageSizeGroupBox_s1")
        self.imageWidthLineEdit_s1 = QtWidgets.QLineEdit(self.imageSizeGroupBox_s1)
        self.imageWidthLineEdit_s1.setGeometry(QtCore.QRect(60, 30, 41, 27))
        self.imageWidthLineEdit_s1.setAutoFillBackground(False)
        self.imageWidthLineEdit_s1.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.imageWidthLineEdit_s1.setReadOnly(False)
        self.imageWidthLineEdit_s1.setPlaceholderText("")
        self.imageWidthLineEdit_s1.setObjectName("imageWidthLineEdit_s1")
        self.imageHeightLineEdit_s1 = QtWidgets.QLineEdit(self.imageSizeGroupBox_s1)
        self.imageHeightLineEdit_s1.setGeometry(QtCore.QRect(60, 65, 41, 27))
        self.imageHeightLineEdit_s1.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.imageHeightLineEdit_s1.setReadOnly(False)
        self.imageHeightLineEdit_s1.setObjectName("imageHeightLineEdit_s1")
        self.label = QtWidgets.QLabel(self.imageSizeGroupBox_s1)
        self.label.setGeometry(QtCore.QRect(10, 30, 80, 30))
        self.label.setObjectName("label")
        self.label_2 = QtWidgets.QLabel(self.imageSizeGroupBox_s1)
        self.label_2.setGeometry(QtCore.QRect(7, 70, 66, 16))
        self.label_2.setObjectName("label_2")
        self.label_3 = QtWidgets.QLabel(self.imageSizeGroupBox_s1)
        self.label_3.setGeometry(QtCore.QRect(110, 30, 21, 30))
        self.label_3.setObjectName("label_3")
        self.label_4 = QtWidgets.QLabel(self.imageSizeGroupBox_s1)
        self.label_4.setGeometry(QtCore.QRect(110, 70, 30, 20))
        self.label_4.setObjectName("label_4")
        # Stage I feature-extraction method radio buttons.
        self.groupBox_2 = QtWidgets.QGroupBox(self.stage1_groupBox)
        self.groupBox_2.setGeometry(QtCore.QRect(10, 140, 141, 151))
        self.groupBox_2.setMaximumSize(QtCore.QSize(160, 16777215))
        self.groupBox_2.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"")
        self.groupBox_2.setObjectName("groupBox_2")
        self.vggRadioButton = QtWidgets.QRadioButton(self.groupBox_2)
        self.vggRadioButton.setGeometry(QtCore.QRect(20, 30, 115, 22))
        self.vggRadioButton.setObjectName("vggRadioButton")
        self.netvladRadioButton = QtWidgets.QRadioButton(self.groupBox_2)
        self.netvladRadioButton.setGeometry(QtCore.QRect(20, 122, 115, 20))
        self.netvladRadioButton.setObjectName("netvladRadioButton")
        self.resnetRadioButton = QtWidgets.QRadioButton(self.groupBox_2)
        self.resnetRadioButton.setGeometry(QtCore.QRect(20, 60, 115, 22))
        self.resnetRadioButton.setObjectName("resnetRadioButton")
        self.googlenetRadioButton = QtWidgets.QRadioButton(self.groupBox_2)
        self.googlenetRadioButton.setGeometry(QtCore.QRect(20, 90, 115, 22))
        self.googlenetRadioButton.setObjectName("googlenetRadioButton")
        # Stage I PCA settings (dimension + sample count).
        self.groupBox_8 = QtWidgets.QGroupBox(self.stage1_groupBox)
        self.groupBox_8.setGeometry(QtCore.QRect(10, 300, 141, 100))
        self.groupBox_8.setMaximumSize(QtCore.QSize(160, 16777215))
        self.groupBox_8.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"")
        self.groupBox_8.setObjectName("groupBox_8")
        self.pcaDimLineEdit_s1 = QtWidgets.QLineEdit(self.groupBox_8)
        self.pcaDimLineEdit_s1.setGeometry(QtCore.QRect(80, 20, 51, 27))
        self.pcaDimLineEdit_s1.setAutoFillBackground(False)
        self.pcaDimLineEdit_s1.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.pcaDimLineEdit_s1.setReadOnly(False)
        self.pcaDimLineEdit_s1.setPlaceholderText("")
        self.pcaDimLineEdit_s1.setObjectName("pcaDimLineEdit_s1")
        self.label_21 = QtWidgets.QLabel(self.groupBox_8)
        self.label_21.setGeometry(QtCore.QRect(10, 20, 80, 30))
        self.label_21.setObjectName("label_21")
        self.label_22 = QtWidgets.QLabel(self.groupBox_8)
        self.label_22.setGeometry(QtCore.QRect(10, 50, 80, 30))
        self.label_22.setObjectName("label_22")
        self.pcaSamplesLineEdit_s1 = QtWidgets.QLineEdit(self.groupBox_8)
        self.pcaSamplesLineEdit_s1.setGeometry(QtCore.QRect(80, 50, 51, 27))
        self.pcaSamplesLineEdit_s1.setAutoFillBackground(False)
        self.pcaSamplesLineEdit_s1.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.pcaSamplesLineEdit_s1.setReadOnly(False)
        self.pcaSamplesLineEdit_s1.setPlaceholderText("")
        self.pcaSamplesLineEdit_s1.setObjectName("pcaSamplesLineEdit_s1")
        # File-selection group: reference dir, test dir, ground truth.
        self.groupBox_3 = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox_3.setGeometry(QtCore.QRect(30, 460, 531, 131))
        self.groupBox_3.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"\n"
"")
        self.groupBox_3.setObjectName("groupBox_3")
        self.btnLoadReference = QtWidgets.QPushButton(self.groupBox_3)
        self.btnLoadReference.setGeometry(QtCore.QRect(20, 24, 101, 27))
        self.btnLoadReference.setObjectName("btnLoadReference")
        self.btnLoadTest = QtWidgets.QPushButton(self.groupBox_3)
        self.btnLoadTest.setGeometry(QtCore.QRect(20, 60, 101, 27))
        self.btnLoadTest.setObjectName("btnLoadTest")
        self.btnLoadGroungTruth = QtWidgets.QPushButton(self.groupBox_3)
        self.btnLoadGroungTruth.setGeometry(QtCore.QRect(20, 96, 101, 27))
        self.btnLoadGroungTruth.setObjectName("btnLoadGroungTruth")
        # Italic status labels shown next to each load button.
        self.refOkLabel = QtWidgets.QLabel(self.groupBox_3)
        self.refOkLabel.setGeometry(QtCore.QRect(130, 30, 391, 17))
        font = QtGui.QFont()
        font.setPointSize(9)
        font.setItalic(True)
        self.refOkLabel.setFont(font)
        self.refOkLabel.setText("")
        self.refOkLabel.setObjectName("refOkLabel")
        self.testOkLabel = QtWidgets.QLabel(self.groupBox_3)
        self.testOkLabel.setGeometry(QtCore.QRect(130, 66, 391, 17))
        font = QtGui.QFont()
        font.setPointSize(9)
        font.setItalic(True)
        self.testOkLabel.setFont(font)
        self.testOkLabel.setText("")
        self.testOkLabel.setObjectName("testOkLabel")
        self.groundTruthOkLabel = QtWidgets.QLabel(self.groupBox_3)
        self.groundTruthOkLabel.setGeometry(QtCore.QRect(130, 103, 391, 17))
        font = QtGui.QFont()
        font.setPointSize(9)
        font.setItalic(True)
        self.groundTruthOkLabel.setFont(font)
        self.groundTruthOkLabel.setText("")
        self.groundTruthOkLabel.setObjectName("groundTruthOkLabel")
        # Run controls: create database / run recognition.
        self.groupBox_4 = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox_4.setGeometry(QtCore.QRect(30, 610, 161, 111))
        self.groupBox_4.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"\n"
"")
        self.groupBox_4.setObjectName("groupBox_4")
        self.btnCreateDB = QtWidgets.QPushButton(self.groupBox_4)
        self.btnCreateDB.setGeometry(QtCore.QRect(30, 30, 91, 27))
        self.btnCreateDB.setObjectName("btnCreateDB")
        self.btnRecognition = QtWidgets.QPushButton(self.groupBox_4)
        self.btnRecognition.setGeometry(QtCore.QRect(30, 70, 91, 27))
        self.btnRecognition.setObjectName("btnRecognition")
        # Console text browser (monospace, selectable output log).
        self.textBrowser = QtWidgets.QTextBrowser(self.centralwidget)
        self.textBrowser.setGeometry(QtCore.QRect(590, 320, 671, 401))
        font = QtGui.QFont()
        font.setFamily("Monospace")
        font.setPointSize(8)
        self.textBrowser.setFont(font)
        self.textBrowser.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"\n"
"")
        self.textBrowser.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOn)
        self.textBrowser.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse)
        self.textBrowser.setObjectName("textBrowser")
        # Output image preview (scrollable label).
        self.outputGroupBox = QtWidgets.QGroupBox(self.centralwidget)
        self.outputGroupBox.setGeometry(QtCore.QRect(810, 30, 224, 231))
        self.outputGroupBox.setStyleSheet("")
        self.outputGroupBox.setObjectName("outputGroupBox")
        self.scrollArea_4 = QtWidgets.QScrollArea(self.outputGroupBox)
        self.scrollArea_4.setGeometry(QtCore.QRect(0, 20, 221, 211))
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.scrollArea_4.sizePolicy().hasHeightForWidth())
        self.scrollArea_4.setSizePolicy(sizePolicy)
        self.scrollArea_4.setWidgetResizable(True)
        self.scrollArea_4.setObjectName("scrollArea_4")
        self.scrollAreaWidgetContents_4 = QtWidgets.QWidget()
        self.scrollAreaWidgetContents_4.setGeometry(QtCore.QRect(0, 0, 219, 209))
        self.scrollAreaWidgetContents_4.setObjectName("scrollAreaWidgetContents_4")
        self.outputImageLabel = QtWidgets.QLabel(self.scrollAreaWidgetContents_4)
        self.outputImageLabel.setGeometry(QtCore.QRect(0, 0, 224, 224))
        self.outputImageLabel.setText("")
        self.outputImageLabel.setObjectName("outputImageLabel")
        self.scrollArea_4.setWidget(self.scrollAreaWidgetContents_4)
        # Reference image preview (scrollable label).
        self.referenceGroupBox = QtWidgets.QGroupBox(self.centralwidget)
        self.referenceGroupBox.setGeometry(QtCore.QRect(1030, 30, 224, 231))
        self.referenceGroupBox.setStyleSheet("")
        self.referenceGroupBox.setObjectName("referenceGroupBox")
        self.scrollArea_6 = QtWidgets.QScrollArea(self.referenceGroupBox)
        self.scrollArea_6.setGeometry(QtCore.QRect(0, 20, 221, 211))
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.scrollArea_6.sizePolicy().hasHeightForWidth())
        self.scrollArea_6.setSizePolicy(sizePolicy)
        self.scrollArea_6.setWidgetResizable(True)
        self.scrollArea_6.setObjectName("scrollArea_6")
        self.scrollAreaWidgetContents_6 = QtWidgets.QWidget()
        self.scrollAreaWidgetContents_6.setGeometry(QtCore.QRect(0, 0, 219, 209))
        self.scrollAreaWidgetContents_6.setObjectName("scrollAreaWidgetContents_6")
        self.referenceImageLabel = QtWidgets.QLabel(self.scrollAreaWidgetContents_6)
        self.referenceImageLabel.setGeometry(QtCore.QRect(1, 0, 224, 224))
        self.referenceImageLabel.setText("")
        self.referenceImageLabel.setObjectName("referenceImageLabel")
        self.scrollArea_6.setWidget(self.scrollAreaWidgetContents_6)
        # Pause / Stop controls.
        self.groupBox_5 = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox_5.setGeometry(QtCore.QRect(400, 610, 161, 111))
        self.groupBox_5.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"\n"
"")
        self.groupBox_5.setObjectName("groupBox_5")
        self.btnPause = QtWidgets.QPushButton(self.groupBox_5)
        self.btnPause.setGeometry(QtCore.QRect(30, 30, 91, 27))
        self.btnPause.setObjectName("btnPause")
        self.btnStop = QtWidgets.QPushButton(self.groupBox_5)
        self.btnStop.setGeometry(QtCore.QRect(30, 70, 91, 27))
        self.btnStop.setObjectName("btnStop")
        # --- STAGE II settings panel: image size, method, PCA ---
        self.stage2_groupBox = QtWidgets.QGroupBox(self.centralwidget)
        self.stage2_groupBox.setGeometry(QtCore.QRect(210, 30, 161, 411))
        self.stage2_groupBox.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"\n"
"")
        self.stage2_groupBox.setObjectName("stage2_groupBox")
        self.groupBox_10 = QtWidgets.QGroupBox(self.stage2_groupBox)
        self.groupBox_10.setGeometry(QtCore.QRect(10, 30, 141, 101))
        self.groupBox_10.setMaximumSize(QtCore.QSize(160, 16777215))
        self.groupBox_10.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"\n"
"")
        self.groupBox_10.setObjectName("groupBox_10")
        self.imageWidthLineEdit_s2 = QtWidgets.QLineEdit(self.groupBox_10)
        self.imageWidthLineEdit_s2.setGeometry(QtCore.QRect(60, 29, 41, 27))
        self.imageWidthLineEdit_s2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.imageWidthLineEdit_s2.setReadOnly(False)
        self.imageWidthLineEdit_s2.setPlaceholderText("")
        self.imageWidthLineEdit_s2.setObjectName("imageWidthLineEdit_s2")
        self.imageHeightLineEdit_s2 = QtWidgets.QLineEdit(self.groupBox_10)
        self.imageHeightLineEdit_s2.setGeometry(QtCore.QRect(60, 63, 41, 30))
        self.imageHeightLineEdit_s2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.imageHeightLineEdit_s2.setReadOnly(False)
        self.imageHeightLineEdit_s2.setObjectName("imageHeightLineEdit_s2")
        self.label_8 = QtWidgets.QLabel(self.groupBox_10)
        self.label_8.setGeometry(QtCore.QRect(10, 30, 80, 30))
        self.label_8.setObjectName("label_8")
        self.label_9 = QtWidgets.QLabel(self.groupBox_10)
        self.label_9.setGeometry(QtCore.QRect(7, 70, 66, 16))
        self.label_9.setObjectName("label_9")
        self.label_10 = QtWidgets.QLabel(self.groupBox_10)
        self.label_10.setGeometry(QtCore.QRect(110, 30, 21, 30))
        self.label_10.setObjectName("label_10")
        self.label_13 = QtWidgets.QLabel(self.groupBox_10)
        self.label_13.setGeometry(QtCore.QRect(110, 66, 30, 31))
        self.label_13.setObjectName("label_13")
        # Stage II method radio buttons (no NetVLAD option here).
        self.groupBox_9 = QtWidgets.QGroupBox(self.stage2_groupBox)
        self.groupBox_9.setGeometry(QtCore.QRect(10, 140, 141, 151))
        self.groupBox_9.setMaximumSize(QtCore.QSize(160, 16777215))
        self.groupBox_9.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"")
        self.groupBox_9.setObjectName("groupBox_9")
        self.vggRadioButton_s2 = QtWidgets.QRadioButton(self.groupBox_9)
        self.vggRadioButton_s2.setGeometry(QtCore.QRect(20, 30, 115, 22))
        self.vggRadioButton_s2.setObjectName("vggRadioButton_s2")
        self.resnetRadioButton_s2 = QtWidgets.QRadioButton(self.groupBox_9)
        self.resnetRadioButton_s2.setGeometry(QtCore.QRect(20, 60, 115, 22))
        self.resnetRadioButton_s2.setObjectName("resnetRadioButton_s2")
        self.googlenetRadioButton_s2 = QtWidgets.QRadioButton(self.groupBox_9)
        self.googlenetRadioButton_s2.setGeometry(QtCore.QRect(20, 90, 115, 22))
        self.googlenetRadioButton_s2.setObjectName("googlenetRadioButton_s2")
        # Stage II PCA settings.
        self.groupBox_11 = QtWidgets.QGroupBox(self.stage2_groupBox)
        self.groupBox_11.setGeometry(QtCore.QRect(10, 300, 141, 100))
        self.groupBox_11.setMaximumSize(QtCore.QSize(160, 16777215))
        self.groupBox_11.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"")
        self.groupBox_11.setObjectName("groupBox_11")
        self.pcaDimLineEdit_s2 = QtWidgets.QLineEdit(self.groupBox_11)
        self.pcaDimLineEdit_s2.setGeometry(QtCore.QRect(80, 20, 51, 27))
        self.pcaDimLineEdit_s2.setAutoFillBackground(False)
        self.pcaDimLineEdit_s2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.pcaDimLineEdit_s2.setReadOnly(False)
        self.pcaDimLineEdit_s2.setPlaceholderText("")
        self.pcaDimLineEdit_s2.setObjectName("pcaDimLineEdit_s2")
        self.label_23 = QtWidgets.QLabel(self.groupBox_11)
        self.label_23.setGeometry(QtCore.QRect(10, 20, 80, 30))
        self.label_23.setObjectName("label_23")
        self.label_24 = QtWidgets.QLabel(self.groupBox_11)
        self.label_24.setGeometry(QtCore.QRect(10, 50, 80, 30))
        self.label_24.setObjectName("label_24")
        self.pcaSamplesLineEdit_s2 = QtWidgets.QLineEdit(self.groupBox_11)
        self.pcaSamplesLineEdit_s2.setGeometry(QtCore.QRect(80, 50, 51, 27))
        self.pcaSamplesLineEdit_s2.setAutoFillBackground(False)
        self.pcaSamplesLineEdit_s2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.pcaSamplesLineEdit_s2.setReadOnly(False)
        self.pcaSamplesLineEdit_s2.setPlaceholderText("")
        self.pcaSamplesLineEdit_s2.setObjectName("pcaSamplesLineEdit_s2")
        # Save-output / PR-curve controls.
        self.groupBox_6 = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox_6.setGeometry(QtCore.QRect(220, 610, 161, 111))
        self.groupBox_6.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"\n"
"")
        self.groupBox_6.setObjectName("groupBox_6")
        self.btnSaveOutput = QtWidgets.QPushButton(self.groupBox_6)
        self.btnSaveOutput.setGeometry(QtCore.QRect(30, 30, 91, 27))
        self.btnSaveOutput.setObjectName("btnSaveOutput")
        self.btnPRcurves = QtWidgets.QPushButton(self.groupBox_6)
        self.btnPRcurves.setGeometry(QtCore.QRect(30, 70, 91, 27))
        self.btnPRcurves.setObjectName("btnPRcurves")
        # Retrieval parameters: candidates, frame tolerance, frame correlation.
        self.groupBox_12 = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox_12.setGeometry(QtCore.QRect(390, 30, 171, 221))
        self.groupBox_12.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"")
        self.groupBox_12.setObjectName("groupBox_12")
        self.label_17 = QtWidgets.QLabel(self.groupBox_12)
        self.label_17.setGeometry(QtCore.QRect(10, 65, 111, 30))
        self.label_17.setObjectName("label_17")
        self.frameTolLineEdit = QtWidgets.QLineEdit(self.groupBox_12)
        self.frameTolLineEdit.setGeometry(QtCore.QRect(125, 65, 38, 27))
        self.frameTolLineEdit.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.frameTolLineEdit.setObjectName("frameTolLineEdit")
        self.candidatesLineEdit = QtWidgets.QLineEdit(self.groupBox_12)
        self.candidatesLineEdit.setGeometry(QtCore.QRect(125, 31, 38, 27))
        self.candidatesLineEdit.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.candidatesLineEdit.setObjectName("candidatesLineEdit")
        self.label_18 = QtWidgets.QLabel(self.groupBox_12)
        self.label_18.setGeometry(QtCore.QRect(10, 32, 81, 30))
        self.label_18.setObjectName("label_18")
        self.prevFramesLineEdit = QtWidgets.QLineEdit(self.groupBox_12)
        self.prevFramesLineEdit.setGeometry(QtCore.QRect(125, 100, 38, 27))
        self.prevFramesLineEdit.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.prevFramesLineEdit.setObjectName("prevFramesLineEdit")
        self.label_5 = QtWidgets.QLabel(self.groupBox_12)
        self.label_5.setGeometry(QtCore.QRect(10, 100, 111, 31))
        self.label_5.setText("Frame corr. (FC)")
        self.label_5.setObjectName("label_5")
        # GPU options: load DB on GPU, maximum GPU candidates.
        self.gpuGroupBox = QtWidgets.QGroupBox(self.centralwidget)
        self.gpuGroupBox.setGeometry(QtCore.QRect(390, 310, 171, 131))
        self.gpuGroupBox.setStyleSheet("QGroupBox {\n"
" border: 1px solid gray;\n"
" border-radius: 9px;\n"
" margin-top: 0.5em;\n"
"}\n"
"\n"
"QGroupBox::title {\n"
" subcontrol-origin: margin;\n"
" left: 3px;\n"
" padding: 3 0 3 0;\n"
"}\n"
"")
        self.gpuGroupBox.setObjectName("gpuGroupBox")
        self.label_12 = QtWidgets.QLabel(self.gpuGroupBox)
        self.label_12.setGeometry(QtCore.QRect(10, 80, 111, 31))
        self.label_12.setText("Max. candidates ")
        self.label_12.setObjectName("label_12")
        self.gpuCandLineEdit = QtWidgets.QLineEdit(self.gpuGroupBox)
        self.gpuCandLineEdit.setGeometry(QtCore.QRect(125, 80, 38, 27))
        self.gpuCandLineEdit.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
        self.gpuCandLineEdit.setObjectName("gpuCandLineEdit")
        self.loadDbOnGpuCheckBox = QtWidgets.QCheckBox(self.gpuGroupBox)
        self.loadDbOnGpuCheckBox.setGeometry(QtCore.QRect(10, 40, 141, 22))
        self.loadDbOnGpuCheckBox.setObjectName("loadDbOnGpuCheckBox")
        self.useGpuCheckBox = QtWidgets.QCheckBox(self.centralwidget)
        self.useGpuCheckBox.setGeometry(QtCore.QRect(390, 280, 96, 22))
        self.useGpuCheckBox.setObjectName("useGpuCheckBox")
        # Window chrome: central widget, status bar, menu bar, actions.
        MainWindow.setCentralWidget(self.centralwidget)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.menuBar = QtWidgets.QMenuBar(MainWindow)
        self.menuBar.setGeometry(QtCore.QRect(0, 0, 1262, 25))
        self.menuBar.setObjectName("menuBar")
        self.menuAbout = QtWidgets.QMenu(self.menuBar)
        self.menuAbout.setObjectName("menuAbout")
        MainWindow.setMenuBar(self.menuBar)
        self.actionOpen = QtWidgets.QAction(MainWindow)
        self.actionOpen.setObjectName("actionOpen")
        self.actionSpectrogram = QtWidgets.QAction(MainWindow)
        self.actionSpectrogram.setObjectName("actionSpectrogram")
        self.actionFrequency_Map = QtWidgets.QAction(MainWindow)
        self.actionFrequency_Map.setObjectName("actionFrequency_Map")
        self.actionSave_path = QtWidgets.QAction(MainWindow)
        self.actionSave_path.setObjectName("actionSave_path")
        self.actionAbout = QtWidgets.QAction(MainWindow)
        self.actionAbout.setObjectName("actionAbout")
        self.menuAbout.addAction(self.actionAbout)
        self.menuBar.addAction(self.menuAbout.menuAction())
        # Apply all user-visible strings, then wire up auto-connections.
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
    """Install all user-visible strings on the widgets created by setupUi().

    Auto-generated (Qt Designer / pyuic style): every literal is routed
    through QCoreApplication.translate so the UI can be localised.  If the
    .ui file is still the source of truth, edit it there and regenerate
    instead of hand-editing these strings.
    """
    _translate = QtCore.QCoreApplication.translate
    MainWindow.setWindowTitle(_translate("MainWindow", "Visual Place Recognition interface"))
    self.groupBox_7.setTitle(_translate("MainWindow", "Console"))
    self.queryGroupBox.setTitle(_translate("MainWindow", "Query"))
    # --- Stage I panel -------------------------------------------------
    self.stage1_groupBox.setTitle(_translate("MainWindow", "STAGE I "))
    self.imageSizeGroupBox_s1.setTitle(_translate("MainWindow", "Image size"))
    self.imageWidthLineEdit_s1.setText(_translate("MainWindow", "224"))
    self.imageHeightLineEdit_s1.setText(_translate("MainWindow", "224"))
    self.label.setText(_translate("MainWindow", "Width"))
    self.label_2.setText(_translate("MainWindow", "Height"))
    self.label_3.setText(_translate("MainWindow", "px"))
    self.label_4.setText(_translate("MainWindow", "px"))
    self.groupBox_2.setTitle(_translate("MainWindow", "Method"))
    self.vggRadioButton.setText(_translate("MainWindow", "VGG16"))
    self.netvladRadioButton.setText(_translate("MainWindow", "NetVLAD"))
    self.resnetRadioButton.setText(_translate("MainWindow", "ResNet"))
    self.googlenetRadioButton.setText(_translate("MainWindow", "GoogLeNet"))
    self.groupBox_8.setTitle(_translate("MainWindow", "PCA"))
    self.pcaDimLineEdit_s1.setText(_translate("MainWindow", "125"))
    self.label_21.setText(_translate("MainWindow", "Dim."))
    self.label_22.setText(_translate("MainWindow", "Samples"))
    self.pcaSamplesLineEdit_s1.setText(_translate("MainWindow", "10000"))
    # --- File selection / run controls ---------------------------------
    self.groupBox_3.setTitle(_translate("MainWindow", "Select files"))
    self.btnLoadReference.setText(_translate("MainWindow", "Reference dir"))
    self.btnLoadTest.setText(_translate("MainWindow", "Test dir"))
    # NOTE(review): "btnLoadGroungTruth" is a generated-name typo kept as-is.
    self.btnLoadGroungTruth.setText(_translate("MainWindow", "Ground truth"))
    self.groupBox_4.setTitle(_translate("MainWindow", "Run"))
    self.btnCreateDB.setText(_translate("MainWindow", "Create DB"))
    self.btnRecognition.setText(_translate("MainWindow", "Recognition"))
    # Initial (empty) rich-text document for the console browser.
    self.textBrowser.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Monospace\'; font-size:8pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-family:\'Ubuntu\'; font-size:11pt;\"><br /></p></body></html>"))
    self.outputGroupBox.setTitle(_translate("MainWindow", "Recognized"))
    self.referenceGroupBox.setTitle(_translate("MainWindow", "Ground truth"))
    self.groupBox_5.setTitle(_translate("MainWindow", "Controls"))
    self.btnPause.setText(_translate("MainWindow", "Pause"))
    self.btnStop.setText(_translate("MainWindow", "Stop"))
    # --- Stage II panel ------------------------------------------------
    self.stage2_groupBox.setTitle(_translate("MainWindow", "STAGE II "))
    self.groupBox_10.setTitle(_translate("MainWindow", "Image size"))
    self.imageWidthLineEdit_s2.setText(_translate("MainWindow", "224"))
    self.imageHeightLineEdit_s2.setText(_translate("MainWindow", "224"))
    self.label_8.setText(_translate("MainWindow", "Width"))
    self.label_9.setText(_translate("MainWindow", "Height"))
    self.label_10.setText(_translate("MainWindow", "px"))
    self.label_13.setText(_translate("MainWindow", "px"))
    self.groupBox_9.setTitle(_translate("MainWindow", "Method"))
    self.vggRadioButton_s2.setText(_translate("MainWindow", "VGG16"))
    self.resnetRadioButton_s2.setText(_translate("MainWindow", "ResNet"))
    self.googlenetRadioButton_s2.setText(_translate("MainWindow", "GoogLeNet"))
    self.groupBox_11.setTitle(_translate("MainWindow", "PCA"))
    self.pcaDimLineEdit_s2.setText(_translate("MainWindow", "100"))
    self.label_23.setText(_translate("MainWindow", "Dim."))
    self.label_24.setText(_translate("MainWindow", "Samples"))
    self.pcaSamplesLineEdit_s2.setText(_translate("MainWindow", "10000"))
    # --- Output / hyperparameters / GPU --------------------------------
    self.groupBox_6.setTitle(_translate("MainWindow", "Output"))
    self.btnSaveOutput.setText(_translate("MainWindow", "Save "))
    self.btnPRcurves.setText(_translate("MainWindow", "PR curves"))
    self.groupBox_12.setTitle(_translate("MainWindow", "Hyperparameters"))
    self.label_17.setText(_translate("MainWindow", "Frame tol."))
    self.frameTolLineEdit.setText(_translate("MainWindow", "2"))
    self.candidatesLineEdit.setText(_translate("MainWindow", "50"))
    self.label_18.setText(_translate("MainWindow", "Candidates"))
    self.prevFramesLineEdit.setText(_translate("MainWindow", "2"))
    self.gpuGroupBox.setTitle(_translate("MainWindow", "GPU Options"))
    self.gpuCandLineEdit.setText(_translate("MainWindow", "2"))
    self.loadDbOnGpuCheckBox.setText(_translate("MainWindow", "Load DB on GPU"))
    self.useGpuCheckBox.setText(_translate("MainWindow", "Use GPU"))
    # --- Menu / actions ------------------------------------------------
    self.menuAbout.setTitle(_translate("MainWindow", "Help"))
    self.actionOpen.setText(_translate("MainWindow", "Open Video"))
    self.actionSpectrogram.setText(_translate("MainWindow", "Spectrogram"))
    self.actionFrequency_Map.setText(_translate("MainWindow", "Frequency Map"))
    self.actionSave_path.setText(_translate("MainWindow", "Save to directory"))
    self.actionAbout.setText(_translate("MainWindow", "About"))
| 51.938583 | 218 | 0.70671 | 3,598 | 32,981 | 6.364925 | 0.103391 | 0.051351 | 0.074931 | 0.017816 | 0.474739 | 0.430898 | 0.325706 | 0.254094 | 0.239073 | 0.184228 | 0 | 0.051213 | 0.161669 | 32,981 | 634 | 219 | 52.020505 | 0.777062 | 0.005336 | 0 | 0.313505 | 1 | 0 | 0.166479 | 0.006921 | 0 | 0 | 0 | 0 | 0 | 1 | 0.003215 | false | 0 | 0.001608 | 0 | 0.006431 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2336b0fde14c5c5284f5355608e21e8f68c59b7f | 2,865 | py | Python | main.py | angrycaptain19/BU-Patient-Connect-Shortcuts | 6d0ec1d4e23e4c8f81216db12fd70e8c6699674f | [
"MIT"
] | null | null | null | main.py | angrycaptain19/BU-Patient-Connect-Shortcuts | 6d0ec1d4e23e4c8f81216db12fd70e8c6699674f | [
"MIT"
] | 1 | 2021-03-02T09:55:54.000Z | 2021-03-02T09:55:54.000Z | main.py | angrycaptain19/BU-Patient-Connect-Shortcuts | 6d0ec1d4e23e4c8f81216db12fd70e8c6699674f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import selenium, time, os, platform
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from getpass import getpass
import info

# Credentials come from info.py; fall back to interactive prompts so the
# secrets never have to be hard-coded here.
username = info.username
pw = info.password
if username == "":
    username = input("Enter your BU username:")
if pw == "":
    pw = getpass("Enter your password:")

# Check operating system (Windows or macOS) and start the matching driver.
osys = platform.system()
if osys == 'Windows':
    web = webdriver.Chrome('chromedriver.exe')
elif osys == 'Darwin':
    # make sure chromedriver is on PATH
    web = webdriver.Chrome()
else:
    # Previously an unsupported OS left `web` undefined and crashed later
    # with a confusing NameError; fail fast with a clear message instead.
    raise RuntimeError('Unsupported operating system: ' + osys)

start = time.time()
web.get('https://www.bu.edu/shs/getting-started/using-patient-connect/')
wait = WebDriverWait(web, 10)
wait.until(EC.element_to_be_clickable((By.XPATH, '//*[@id="post-7513"]/p[2]/a'))).click()

# The portal opens in a new tab; switch to it.
web.switch_to.window(web.window_handles[1])

# Log-in screen.
username_input = wait.until(EC.presence_of_element_located((By.XPATH, '//*[@id="j_username"]')))
username_input.send_keys(username)
pw_input = web.find_element_by_xpath('//*[@id="j_password"]')
pw_input.send_keys(pw)
login_button = web.find_element_by_xpath('/html/body/div[1]/div/form/button')
login_button.click()
print("Logging in...")

# Click the "Complete Survey" button.
survey_button = wait.until(
    EC.element_to_be_clickable((By.LINK_TEXT, 'Complete Survey'))
)
survey_button.click()

# Click the continue button.
time.sleep(0.5)
continue_button = wait.until(
    EC.presence_of_element_located((By.XPATH, '//*[@id="mainbody"]/div[2]/div[1]/div/div[2]/a'))
)
continue_button.click()

# Survey questions: they live in form/div[2] .. form/div[9] and the first
# option of each is selected.  Same targets as the original copy-pasted
# block, expressed as a loop.
time.sleep(0.5)
for question in range(2, 10):
    web.find_element_by_xpath(
        '//*[@id="mainbody"]/main/form/div[%d]/fieldset/div/div[1]/div' % question
    ).click()
print('Survey Complete')

# Submit the survey.
web.find_element_by_xpath('//*[@id="mainbody"]/footer/div/div[2]/input').click()
print('Survey Submitted')
print('This took: %.2f seconds' % (time.time()-start))
| 32.931034 | 103 | 0.713089 | 435 | 2,865 | 4.551724 | 0.289655 | 0.049495 | 0.059091 | 0.088889 | 0.378283 | 0.367677 | 0.344444 | 0.328788 | 0.295455 | 0.295455 | 0 | 0.013857 | 0.093194 | 2,865 | 86 | 104 | 33.313953 | 0.748268 | 0.096684 | 0 | 0.037736 | 0 | 0.150943 | 0.344078 | 0.260583 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.075472 | 0.150943 | 0 | 0.150943 | 0.075472 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
2337e355350c7783ccfa733784a10084a8ef256b | 13,186 | py | Python | RTI_3D.py | S8aVv/PADAR | 73ffca401fef1f83cccf6f381d03fe4c725baa8b | [
"BSD-2-Clause"
] | 1 | 2019-08-02T17:25:05.000Z | 2019-08-02T17:25:05.000Z | RTI_3D.py | S8aVv/PADAR | 73ffca401fef1f83cccf6f381d03fe4c725baa8b | [
"BSD-2-Clause"
] | null | null | null | RTI_3D.py | S8aVv/PADAR | 73ffca401fef1f83cccf6f381d03fe4c725baa8b | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed Nov 16 15:31:16 2016
@author: shaw
"""
import xlrd
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
import numpy as np
from pylab import *
from itertools import product
from matplotlib.colors import LogNorm
#import gc
def config():
    """Build the RTI grid parameters and the per-tag coordinate tables.

    Returns the tuple (j, num_l, num_w, m, n, cte, x1, y1):
    j   -- total number of voxels in the monitored area
    num_l, num_w -- voxel counts along the length / width (floats)
    m, n -- voxel size along each axis
    cte -- offset from a voxel corner to its centre
    x1, y1 -- x / y coordinate of each tag, keyed by EPC index
    """
    cell_h = 2                       # voxel height (was m)
    cell_w = 2                       # voxel width  (was n)
    area_length = 230                # monitored area, length
    area_width = 200                 # monitored area, width
    half_cell = cell_h / 2           # centre offset within a voxel
    cols = area_length / cell_h      # voxels along the length
    rows = area_width / cell_w       # voxels along the width
    voxels = int((area_length * area_width) / (cell_h * cell_w))
    # Tag x coordinates keyed by EPC index (indices 81-96, 98 and 111 are
    # absent in the deployment).
    tag_x = {1: 10, 2: 30, 3: 55, 4: 75, 5: 145, 6: 165, 7: 100, 8: 120, 9: 10, 10: 30,
             11: 55, 12: 75, 13: 145, 14: 165, 15: 100, 16: 120, 17: 10, 18: 30, 19: 55, 20: 75,
             21: 145, 22: 165, 23: 100, 24: 120, 25: 10, 26: 30, 27: 55, 28: 75, 29: 145, 30: 165,
             31: 100, 32: 120, 33: 10, 34: 30, 35: 55, 36: 75, 37: 145, 38: 165, 39: 100, 40: 120,
             41: 100, 42: 120, 43: 10, 44: 90, 45: 55, 46: 75, 47: 145, 48: 165, 49: 100, 50: 120,
             51: 10, 52: 30, 53: 55, 54: 75, 55: 145, 56: 165, 57: 100, 58: 120, 59: 10, 60: 30,
             61: 55, 62: 75, 63: 145, 64: 165, 65: 100, 66: 120, 67: 10, 68: 30, 69: 55, 70: 75,
             71: 145, 72: 165, 73: 100, 74: 120, 75: 10, 76: 30, 77: 55, 78: 75, 79: 145, 80: 165,
             97: 190, 99: 210, 100: 190, 101: 210, 102: 190, 103: 210, 104: 190, 105: 210, 106: 190, 107: 210,
             108: 190, 109: 210, 110: 190, 112: 210, 113: 190, 114: 210, 115: 190, 116: 210, 117: 190, 118: 210}
    # Tag y coordinates keyed by EPC index.
    tag_y = {1: 190, 2: 190, 3: 190, 4: 190, 5: 90, 6: 90, 7: 190, 8: 190, 9: 170, 10: 170,
             11: 170, 12: 170, 13: 70, 14: 70, 15: 170, 16: 170, 17: 150, 18: 150, 19: 150, 20: 150,
             21: 50, 22: 50, 23: 150, 24: 150, 25: 130, 26: 130, 27: 130, 28: 130, 29: 30, 30: 30,
             31: 130, 32: 130, 33: 110, 34: 110, 35: 110, 36: 110, 37: 10, 38: 10, 39: 110, 40: 110,
             41: 90, 42: 90, 43: 90, 44: 90, 45: 90, 46: 90, 47: 190, 48: 190, 49: 70, 50: 70,
             51: 70, 52: 70, 53: 70, 54: 70, 55: 170, 56: 170, 57: 50, 58: 50, 59: 50, 60: 50,
             61: 50, 62: 50, 63: 150, 64: 150, 65: 30, 66: 30, 67: 30, 68: 30, 69: 30, 70: 30,
             71: 130, 72: 130, 73: 10, 74: 10, 75: 10, 76: 10, 77: 10, 78: 10, 79: 110, 80: 110,
             97: 190, 99: 190, 100: 170, 101: 170, 102: 150, 103: 150, 104: 130, 105: 130, 106: 110, 107: 110,
             108: 90, 109: 90, 110: 70, 112: 70, 113: 50, 114: 50, 115: 30, 116: 30, 117: 10, 118: 10}
    return voxels, cols, rows, cell_h, cell_w, half_cell, tag_x, tag_y
def center_voxel(j, cte, num_l, num_w, m, n):
    """Return a (j, 2) numpy array of voxel-centre coordinates [x, y].

    Centres are emitted column by column: for each length index (x),
    the width index (y) sweeps 0..num_w-1; exactly j centres are produced.
    """
    centres = []
    col = 0
    while len(centres) < j:
        if col < num_l:
            for row in range(int(num_w)):
                if len(centres) == j:
                    break
                # x advances with the column, y with the row within it.
                centres.append([cte + n * col, cte + m * row])
            col += 1
    return np.array(centres)
def vector(path1, path2, x1, y1, v,j):
    """Build and solve the RTI measurement system from two RFID read logs.

    path1 / path2 -- .xlsx logs for the two scenarios (rows hold the EPC
    index in column 0, the antenna index in column 1 and the RSSI in
    column 6; row 0 is skipped as a header).
    x1 / y1 -- tag coordinates keyed by EPC index; v -- (j, 2) voxel
    centres; j -- number of voxels.

    Walks both sheets in lockstep, pairing rows by EPC index and antenna
    index.  For every paired (or one-sided) reading it appends the
    absolute RSSI difference to `y` and, per voxel, a 0/1 weight to `a`
    marking voxels whose centre lies within p = 10 units of the tag.  A
    reading missing on one side is substituted with a -80 dBm floor.
    Returns w.T @ y (one intensity value per voxel) as a plain ndarray.

    NOTE(review): the row-pairing heuristic relies on the logs being
    sorted by EPC then antenna -- confirm against the capture tooling.
    """
    table1 = xlrd.open_workbook(path1)
    table2 = xlrd.open_workbook(path2)
    sh1 = table1.sheet_by_index(0)   # first worksheet of each log
    sh2 = table2.sheet_by_index(0)
    r1 = 1                           # current row in each sheet (row 0 is a header)
    r2 = 1
    y = []                           # one |RSSI1 - RSSI2| per measurement
    epc_index1 = 0
    epc_index2 = 0
    ante_index1 = 0
    ante_index2 = 0
    rssi1 = 0.0
    rssi2 = 0.0
    count = 0                        # number of measurements collected
    num = 0
    p = 10                           # radius: voxels within p units of the tag get weight 1
    a = []                           # flattened 0/1 weight matrix rows (count x j)
    # Stop one row early on both sheets because each step peeks at r+1.
    while((r1+1 <= sh1.nrows-1)and(r2+1 <= sh2.nrows-1)):
        epc_index1 = int(sh1.cell_value(r1, 0))
        epc_index2 = int(sh2.cell_value(r2, 0))
        epc_next1 = int(sh1.cell_value(r1+1, 0))
        epc_next2 = int(sh2.cell_value(r2+1, 0))
        ante_index1 = int(sh1.cell_value(r1, 1))
        ante_index2 = int(sh2.cell_value(r2, 1))
        ante_next1 = int(sh1.cell_value(r1+1, 1))
        ante_next2 = int(sh2.cell_value(r2+1, 1))
        # Case 1: same EPC on both sides and it continues on both sides.
        if((epc_index1 == epc_index2) and (epc_index1 == epc_next1) and (epc_index2 == epc_next2)):
            if(ante_index1 == ante_index2):
                if(ante_index1 == ante_next1 and ante_index2 == ante_next2):
                    # Same antenna repeats on both sides: advance both, defer the reading.
                    r1 = r1 + 1
                    r2 = r2 + 1
                elif(ante_index1 == ante_next1 and ante_index2 != ante_next2):
                    # Sheet 1 repeats this antenna: take its next row before pairing.
                    r1 = r1 + 1
                    rssi1 = float(sh1.cell_value(r1, 6))
                    rssi2 = float(sh2.cell_value(r2, 6))
                    i = abs(rssi1 - rssi2)
                    y.append(i)
                    count = count + 1
                    # Weight row: 1 for voxels within p of this tag, else 0.
                    for num in range(j):
                        d = ((x1[epc_index1]- v[num][0])**2+(y1[epc_index1] - v[num][1])**2)**0.5
                        if(d < p):
                            a.append(1)
                        else:
                            a.append(0)
                elif(ante_index1 != ante_next1 and ante_index2 == ante_next2):
                    # Sheet 2 repeats this antenna: take its next row before pairing.
                    r2 = r2 + 1
                    rssi1 = float(sh1.cell_value(r1, 6))
                    rssi2 = float(sh2.cell_value(r2, 6))
                    i = abs(rssi1 - rssi2)
                    y.append(i)
                    count = count + 1
                    for num in range(j):
                        d = ((x1[epc_index1]- v[num][0])**2+(y1[epc_index1] - v[num][1])**2)**0.5
                        if(d < p):
                            a.append(1)
                        else:
                            a.append(0)
                elif(ante_index1 != ante_next1 and ante_index2 != ante_next2):
                    # Last row for this antenna on both sides: pair them directly.
                    num = 0
                    rssi1 = float(sh1.cell_value(r1, 6))
                    rssi2 = float(sh2.cell_value(r2, 6))
                    i = abs(rssi1 - rssi2)
                    y.append(i)
                    count = count + 1
                    for num in range(j):
                        # print y[epc_index1]
                        d = ((x1[epc_index1]- v[num][0])**2+(y1[epc_index1] - v[num][1])**2)**0.5
                        if(d < p):
                            a.append(1)
                        else:
                            a.append(0)
                    r1 = r1 + 1
                    r2 = r2 + 1
            elif(ante_index1 > ante_index2):
                # Sheet 2 lags: the antenna is missing from sheet 1.
                if(ante_index1 != ante_next1 and ante_index2 != ante_next2):
                    # Use the -80 dBm floor for the missing sheet-1 reading.
                    rssi1 = float(sh1.cell_value(r1, 6))
                    rssi2 = -80
                    i = abs(rssi1 - rssi2)
                    y.append(i)
                    r2 = r2 +1
                    count = count + 1
                    for num in range(j):
                        d = ((x1[epc_index1]- v[num][0])**2+(y1[epc_index1] - v[num][1])**2)**0.5
                        if(d < p):
                            a.append(1)
                        else:
                            a.append(0)
                else:
                    r2 = r2 + 1
            elif(ante_index2 > ante_index1):
                # Sheet 1 lags: the antenna is missing from sheet 2.
                if(ante_index2 != ante_next2 and ante_index1 != ante_next1):
                    rssi1 = -80
                    rssi2 = float(sh2.cell_value(r2, 6))
                    i = abs(rssi1 - rssi2)
                    y.append(i)
                    r1 = r1 + 1
                    count = count + 1
                    for num in range(j):
                        d = ((x1[epc_index1]- v[num][0])**2+(y1[epc_index1] - v[num][1])**2)**0.5
                        if(d < p):
                            a.append(1)
                        else:
                            a.append(0)
                else:
                    r1 = r1 + 1
        # Case 2: same EPC on both sides but it ends on at least one side.
        elif((epc_index1 == epc_index2) and((epc_index1 != epc_next1) or(epc_index2 != epc_next2))):
            if(ante_index1 == ante_index2):
                rssi1 = float(sh1.cell_value(r1, 6))
                rssi2 = float(sh2.cell_value(r2, 6))
                i = abs(rssi1 - rssi2)
                y.append(i)
                count = count + 1
                for num in range(j):
                    d = ((x1[epc_index1]- v[num][0])**2+(y1[epc_index1] - v[num][1])**2)**0.5
                    if(d < p):
                        a.append(1)
                    else:
                        a.append(0)
            elif(ante_index1 > ante_index2):
                rssi1 = float(sh1.cell_value(r1, 6))
                rssi2 = -80
                i = abs(rssi1 - rssi2)
                y.append(i)
                count = count + 1
                for num in range(j):
                    d = ((x1[epc_index1]- v[num][0])**2+(y1[epc_index1] - v[num][1])**2)**0.5
                    if(d < p):
                        a.append(1)
                    else:
                        a.append(0)
            elif(ante_index2 > ante_index1):
                rssi1 = -80
                rssi2 = float(sh2.cell_value(r2, 6))
                i = abs(rssi1 - rssi2)
                y.append(i)
                count = count + 1
                for num in range(j):
                    d = ((x1[epc_index1]- v[num][0])**2+(y1[epc_index1] - v[num][1])**2)**0.5
                    if(d < p):
                        a.append(1)
                    else:
                        a.append(0)
            # elif()
            r1 = r1 + 1
            r2 = r2 + 1
        # Case 3: sheet 2 is on an earlier EPC -- that EPC is absent in sheet 1.
        elif(epc_index1 > epc_index2):
            epc_before1 = int(sh1.cell_value(r1-1, 0))
            epc_before2 = int(sh2.cell_value(r2-1, 0))
            if(epc_before1 != epc_index2 and epc_index1 != epc_next2):
                rssi1 = -80
                rssi2 = float(sh2.cell_value(r2, 6))
                i = abs(rssi1 - rssi2)
                y.append(i)
                r2 = r2 + 1
                count = count + 1
                for num in range(j):
                    # Uses sheet 2's EPC for the tag position (the one actually read).
                    d = ((x1[epc_index2]- v[num][0])**2+(y1[epc_index2] - v[num][1])**2)**0.5
                    if(d < p):
                        a.append(1)
                    else:
                        a.append(0)
            else:
                r2 = r2 + 1
        # Case 4: sheet 1 is on an earlier EPC -- that EPC is absent in sheet 2.
        elif(epc_index2 > epc_index1):
            epc_before1 = int(sh1.cell_value(r1-1, 0))
            epc_before2 = int(sh2.cell_value(r2-1, 0))
            if(epc_before2 != epc_index1 and epc_index2 != epc_next1):
                rssi1 = float(sh1.cell_value(r1, 6))
                rssi2 = -80
                i = abs(rssi1 - rssi2)
                y.append(i)
                r1 = r1 + 1
                count = count + 1
                for num in range(j):
                    d = ((x1[epc_index1]- v[num][0])**2+(y1[epc_index1] - v[num][1])**2)**0.5
                    if(d < p):
                        a.append(1)
                    else:
                        a.append(0)
            else:
                r1 = r1 + 1
    # print count
    # Assemble the (count x j) weight matrix and the measurement vector.
    b = np.array(a)
    w = b.reshape(count, j)
    # print b.size
    # print w
    # z = np.dot(w.T, w)
    # print z
    # q = np.identity(j)
    t= np.mat(y)
    # l = 40
    # u = l * np.dot(q.T, q)
    # print u
    # o = np.mat((z + u))
    # print o
    # e = np.dot(w.T, t.T)
    # print e
    # Back-projection: per-voxel intensity = weights transposed times measurements.
    h = np.dot(w.T , t.T)
    x = h.getA()
    # f = open('C:/Users/songkai/Desktop/record/new/x.txt', 'w')
    # for n in range(len(x[0])):
    #     f.write(x[0][n])
    # f.close()
    # plt.hist2d(230, 200, bins=40,weight = x)
    # plt.colorbar()
    # plt.show()
    return x
#def vector_x(w, y,j):
# q = np.identity(j)
# t= np.matrix(y)
# print t
# l = 40
# x = (w.T * w + l * q.T * q).I * w.T * t.T
# count_length = length / n
# count_wide = wide / m
# x = t.reshape([count_wide, count_length])
# return x
def draw(v,x):
    """Scatter-plot the voxel intensities `x` at the voxel centres `v`.

    v -- (j, 2) array of [x, y] voxel centres; x -- per-voxel intensities
    used as the colour channel.  Axes are fixed to the 230 x 200 area.
    Returns 0 (no meaningful value).
    """
    plt.xlim(0,230)
    plt.ylim(0,200)
    ax = plt.gca()
    # ax.xaxis.set_minor_locator(MultipleLocator(2))
    # ax.yaxis.set_minor_locator(MultipleLocator(2))
    # Major ticks every 20 units on both axes.
    ax.xaxis.set_major_locator(MultipleLocator(20))
    ax.yaxis.set_major_locator(MultipleLocator(20))
    # plt.grid()
    # plt.show()
    # plt.imshow(x, extent=[0,230,0,200])
    # plt.imsave('pic.jpg', 'JPG')
    # pp1=amap(lambda ra: [ra[0],ra[1]],product(arange(0,230,2),arange(0,200,2)))
    # scatter() comes from `pylab` (star-imported at module level).
    scatter(v[:,0],v[:,1],c=x, edgecolor="none")
    return 0
def main():
    """Run the full RTI pipeline on two hard-coded capture files.

    Pipeline: config() -> center_voxel() -> vector() -> draw().
    """
    # NOTE(review): absolute Windows paths -- adjust for your machine.
    origin = 'C:/Users/shaw/Desktop/record/new/origin_2.4m.xlsx'   # baseline scenario
    stand1 = 'C:/Users/shaw/Desktop/record/new/stand1_0.6m.xlsx'   # measurement scenario
    path1 = origin
    path2 = stand1
    (j, num_l, num_w, m, n, cte, x1, y1) = config()
    v = center_voxel(j,cte,num_l, num_w, m, n)
    x = vector(path1, path2, x1, y1, v,j)
    # x = vector_x(w, y,j)
    # print x
    draw(v, x)
    # show()
if __name__ == '__main__':
main() | 37.460227 | 109 | 0.409298 | 1,795 | 13,186 | 2.911421 | 0.153203 | 0.049943 | 0.034443 | 0.044776 | 0.532147 | 0.508611 | 0.447953 | 0.422503 | 0.418293 | 0.396479 | 0 | 0.202824 | 0.452222 | 13,186 | 352 | 110 | 37.460227 | 0.520698 | 0.087138 | 0 | 0.557252 | 0 | 0 | 0.009454 | 0.008423 | 0 | 0 | 0 | 0 | 0 | 1 | 0.019084 | false | 0 | 0.026718 | 0 | 0.061069 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
23455d865e39a91cb4714b52b2c1ae8decc14395 | 2,046 | py | Python | pak_helpers/helper/models.py | aliraza401/pak_helpers | 56e3d3214e318e28d344888c55d83dc69729d7a2 | [
"MIT"
] | null | null | null | pak_helpers/helper/models.py | aliraza401/pak_helpers | 56e3d3214e318e28d344888c55d83dc69729d7a2 | [
"MIT"
] | null | null | null | pak_helpers/helper/models.py | aliraza401/pak_helpers | 56e3d3214e318e28d344888c55d83dc69729d7a2 | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
from PIL import Image
from django import template
from django.contrib.auth.models import Group
class Helper(models.Model):
    """Profile of a service provider ('helper'), linked one-to-one to a User.

    On save(), the uploaded profile picture is downscaled in place so it
    never exceeds 500x500 pixels.
    """
    # Choices for the gender field.
    option = (
        ('Male', 'Male'),
        ('Female', 'Female')
    )
    user = models.OneToOneField(User, on_delete=models.CASCADE, null=True)
    profile_picture = models.ImageField(
        upload_to='helper_img', null=True, blank=True, default='helper_img/def_img.png')
    contact_number = models.CharField(max_length=13)
    # NOTE(review): presumably the national identity card number -- confirm.
    cnic = models.CharField(max_length=15)
    gender = models.CharField(max_length=10, choices=option, default='Male')
    description = models.TextField(max_length=5000)
    daily_work_rate = models.IntegerField()
    profile_visible = models.BooleanField(null=True)
    age = models.SmallIntegerField(null=True)
    # FIX: null=True removed -- it has no effect on ManyToManyField and
    # triggers Django system-check warning fields.W340.
    category = models.ManyToManyField('Category', blank=True)
    province = models.CharField(max_length=50, null=True, blank=True)
    city = models.CharField(max_length=50, null=True, blank=True)
    area = models.CharField(max_length=50, null=True, blank=True)
    rating = models.IntegerField(null=True)

    def __str__(self):
        return self.user.username

    def save(self, *args, **kwargs):
        """Persist the model, then shrink the profile picture to <= 500x500."""
        super().save(*args, **kwargs)
        img = Image.open(self.profile_picture.path)
        output_size = (500, 500)
        # thumbnail() resizes in place, preserving the aspect ratio.
        img.thumbnail(output_size)
        img.save(self.profile_picture.path)
class Rewiew(models.Model):
    """A star rating plus free-text comment left for a Helper.

    NOTE(review): the class name is a typo for 'Review'; renaming would
    require a migration and updates to every reference, so it is kept.
    """
    # Helper being reviewed; the review survives (with user=NULL) if the
    # helper is deleted.
    user = models.ForeignKey(Helper, on_delete=models.SET_NULL, null=True)
    star = models.IntegerField()
    comment = models.TextField(max_length=1500, default='.')

    def __str__(self):
        # Helper -> its auth User -> username.
        return self.user.user.username
class Category(models.Model):
    """A category of work; Helpers reference these via Helper.category."""
    name = models.CharField(max_length=50)

    def __str__(self):
        return self.name
class Sub_Category(models.Model):
    """A finer-grained subdivision of a Category."""
    # Deleting the parent Category removes its sub-categories too.
    category = models.ForeignKey(Category, on_delete=models.CASCADE)
    name = models.CharField(max_length=50)

    def __str__(self):
        return self.name
23495672b8fbff4e239fbe9d573f062e637434e7 | 192 | py | Python | GCD.py | griledchicken/VAMPY-2017-CS | 1bc71734751850b580b481eac51c5c235d0ca9e2 | [
"MIT"
] | null | null | null | GCD.py | griledchicken/VAMPY-2017-CS | 1bc71734751850b580b481eac51c5c235d0ca9e2 | [
"MIT"
] | null | null | null | GCD.py | griledchicken/VAMPY-2017-CS | 1bc71734751850b580b481eac51c5c235d0ca9e2 | [
"MIT"
] | null | null | null | import functools
@functools.lru_cache(maxsize=None)
def gcd(a, b):
    """Return the greatest common divisor of a and b (Euclid's algorithm).

    Results are memoised with lru_cache, so repeated calls with the same
    arguments are O(1).
    """
    # Iterative Euclid.  The original recursive inner helper could hit the
    # recursion limit for pathological inputs, and its initial a/b swap was
    # redundant: the first `a % b` step performs it implicitly.
    while b:
        a, b = b, a % b
    return a
| 13.714286 | 34 | 0.598958 | 37 | 192 | 3.081081 | 0.405405 | 0.105263 | 0.070175 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006944 | 0.25 | 192 | 13 | 35 | 14.769231 | 0.784722 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.181818 | false | 0 | 0.090909 | 0 | 0.545455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
234df9039a8b223ac49eb73a2a790c39bbcab512 | 2,564 | py | Python | src/move_files.py | knjk04/file-utils | dff4a67bacf36d4560423095fc30f7ce655531fd | [
"MIT"
] | null | null | null | src/move_files.py | knjk04/file-utils | dff4a67bacf36d4560423095fc30f7ce655531fd | [
"MIT"
] | 4 | 2021-09-12T15:55:09.000Z | 2021-09-23T18:41:33.000Z | src/move_files.py | knjk04/file-utils | dff4a67bacf36d4560423095fc30f7ce655531fd | [
"MIT"
] | null | null | null | import os
import platform
import sys
from os import listdir
from pathlib import Path
from src.create_dir import create_numbered_dirs, get_parent_dir
from src.validate_windows_file_name import is_valid_windows_file_name
def get_files_in(dir: str) -> list:
    """Return absolute paths to the files directly inside *dir*, sorted
    alphabetically.  Sub-directories are ignored.
    """
    # os.path.join keeps the separator OS-independent (the previous
    # hard-coded '\\' was flagged by the original TODO), and sorted()
    # makes the result actually match the documented ordering --
    # os.listdir() returns entries in arbitrary order.
    return sorted(
        os.path.join(dir, file)
        for file in listdir(dir)
        if os.path.isfile(os.path.join(dir, file))
    )
def create_new_dirs(parent_dir: str, number_of_dirs: int):
    """Create `number_of_dirs` numbered directories under parent_dir.

    Thin wrapper around create_numbered_dirs that fixes the numbering to
    start at 1 rather than 0.
    """
    first_index = 1
    return create_numbered_dirs(parent_dir, first_index, number_of_dirs)
def move_files(path_to_files: [str], destinations: [str], new_file_name: str):
    """Move each file to its positionally-paired destination directory,
    renaming it to new_file_name while keeping the original extension.

    Assumes every entry of path_to_files is an absolute path and both
    lists are non-empty.  new_file_name does not change the extension.
    """
    for position, source in enumerate(path_to_files):
        basename = os.path.basename(source)
        # Re-attach the original suffix so only the stem changes.
        renamed = new_file_name + Path(basename).suffix
        os.rename(source, os.path.join(destinations[position], renamed))
        print(f'Moved {basename}...')
def ask_for_file_name():
    """Prompt for the new base file name and return it.

    On Windows the name is validated (with up to three attempts) against
    Windows file-name rules; on other platforms any input is accepted.
    """
    question = 'What do you want to use for the new file names ?'
    if platform.system() == "Windows":
        # BUG FIX: the validated name was previously discarded and the
        # user was prompted a second time with no validation at all --
        # return the validated name instead.
        return ask_for_file_name_on_windows(question)
    print(question)
    return input()
def ask_for_file_name_on_windows(question: str):
    """Ask up to three times for a Windows-valid file name.

    Returns the first name that passes is_valid_windows_file_name;
    exits the program after three invalid attempts.
    """
    max_attempts = 3
    attempt = 0
    while attempt < max_attempts:
        print(question, sep='')
        candidate = input()
        if is_valid_windows_file_name(candidate):
            return candidate
        print('Invalid file name. Try again.')
        attempt += 1
    print('Error: you did not enter in a valid name after 3 attempts. Exiting...')
    sys.exit()
def main():
    """Entry point: move every file in the chosen directory into its own
    numbered sub-directory under a user-supplied base name."""
    parent_dir = get_parent_dir()
    source_files = get_files_in(parent_dir)
    # Guard clause: nothing to do without files.
    if not source_files:
        print(f'Cannot find any files in {parent_dir}')
        sys.exit()
    numbered_dirs = create_new_dirs(parent_dir, len(source_files))
    # Guard clause: directory creation fails if the targets already exist.
    if not numbered_dirs:
        print(f'Could not create new directories. Perhaps those directories already exist in '
              f'{parent_dir}?')
        sys.exit()
    move_files(source_files, numbered_dirs, ask_for_file_name())
# Run only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
| 33.298701 | 98 | 0.698518 | 386 | 2,564 | 4.38601 | 0.34456 | 0.085056 | 0.025989 | 0.033077 | 0.100413 | 0.036621 | 0.036621 | 0 | 0 | 0 | 0 | 0.002486 | 0.215679 | 2,564 | 76 | 99 | 33.736842 | 0.839383 | 0.180187 | 0 | 0.102041 | 0 | 0 | 0.155556 | 0.011111 | 0 | 0 | 0 | 0.013158 | 0 | 1 | 0.122449 | false | 0 | 0.142857 | 0 | 0.346939 | 0.142857 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
2357b9c0df8d3b9bc393419608c2a76568db9537 | 192 | py | Python | 1607.py | Ananya2000-byte/SimplePythonCodes | dd80dd6876d2fd9a62039d504706d594b143a16e | [
"Apache-2.0"
] | null | null | null | 1607.py | Ananya2000-byte/SimplePythonCodes | dd80dd6876d2fd9a62039d504706d594b143a16e | [
"Apache-2.0"
] | null | null | null | 1607.py | Ananya2000-byte/SimplePythonCodes | dd80dd6876d2fd9a62039d504706d594b143a16e | [
"Apache-2.0"
] | null | null | null | I = input("Enter the string: ")
# Uppercase copy of the input string I (read on the preceding line).
# NOTE(review): S is computed but never used below -- if case-insensitive
# counting was intended, iterate S instead of I.
S = I.upper()
# Tally every non-space character of I into a {char: count} dict;
# dict.get() replaces the membership-test-then-branch idiom.
freq = {}
for ch in I:
    if ch != " ":
        freq[ch] = freq.get(ch, 0) + 1
print(freq)
| 17.454545 | 32 | 0.390625 | 28 | 192 | 2.678571 | 0.5 | 0.08 | 0.106667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.019048 | 0.453125 | 192 | 10 | 33 | 19.2 | 0.695238 | 0 | 0 | 0 | 0 | 0 | 0.104396 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.1 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
236809d3a7b86fc366416315b2c82cbd47d00703 | 1,117 | py | Python | plateypus/models.py | Geologik/plateypus-backend | 9ff1dd959d5a654895757b183598a4f6529936cf | [
"MIT"
] | null | null | null | plateypus/models.py | Geologik/plateypus-backend | 9ff1dd959d5a654895757b183598a4f6529936cf | [
"MIT"
] | 189 | 2018-10-26T21:17:27.000Z | 2022-03-28T16:16:28.000Z | plateypus/models.py | Geologik/plateypus-backend | 9ff1dd959d5a654895757b183598a4f6529936cf | [
"MIT"
] | null | null | null | """ODM model definitions."""
from elasticsearch_dsl import Date, Document, Keyword, Text
try: # pragma: no cover
from helpers import elastic
except (ImportError, ModuleNotFoundError): # pragma: no cover
from plateypus.helpers import elastic
# Elasticsearch index names used by the Document classes below.
INDEX_METADATA = "plateypus-metadata"
INDEX_VEHICLES = "plateypus-vehicles"
class Metadata(Document):
    """Represents metadata for the application state.

    One document per country, recording when that country's data was
    last refreshed.
    """
    # Country identifier (exact match, not analysed).
    country = Keyword(required=True)
    # When the country's data was last updated.
    last_updated = Date()

    class Index:  # pylint: disable=missing-docstring,too-few-public-methods
        name = INDEX_METADATA
class Vehicle(Document):
    """Represents a vehicle.

    Only `country` and `plate` are required; the remaining fields are
    optional full-text attributes.
    """
    country = Keyword(required=True)   # registration country (exact match)
    plate = Text(required=True)        # licence plate (full-text searchable)
    first_reg = Text()                 # first registration -- stored as free text
    vin = Text()                       # vehicle identification number
    maker = Text()
    model = Text()
    fuel_type = Text()
    colour = Text()
    raw_xml = Text()                   # original source record, kept verbatim

    class Index:  # pylint: disable=missing-docstring,too-few-public-methods
        name = INDEX_VEHICLES
if __name__ == "__main__": # pragma: no cover
with elastic() as client:
Metadata.init(using=client)
Vehicle.init(using=client)
| 24.282609 | 76 | 0.680394 | 129 | 1,117 | 5.75969 | 0.488372 | 0.032301 | 0.05249 | 0.04576 | 0.18035 | 0.18035 | 0.18035 | 0.18035 | 0.18035 | 0.18035 | 0 | 0 | 0.210385 | 1,117 | 45 | 77 | 24.822222 | 0.842404 | 0.230081 | 0 | 0.142857 | 0 | 0 | 0.052381 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.678571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
236ade1e78db590aaee6ca5dbfe921386aeb16a6 | 3,509 | py | Python | webapp.py | prochor666/ctrl | bb6bef2dd8e0690f632be4990e8564bfe4c1e859 | [
"MIT"
] | null | null | null | webapp.py | prochor666/ctrl | bb6bef2dd8e0690f632be4990e8564bfe4c1e859 | [
"MIT"
] | null | null | null | webapp.py | prochor666/ctrl | bb6bef2dd8e0690f632be4990e8564bfe4c1e859 | [
"MIT"
] | null | null | null | from core.config import app_config
import json
import datetime
import logging
import os
from flask import Flask, render_template, Response, request, send_from_directory
from core import compat, app, utils
from core.ctrl import api, auth
compat.check_version()
webapp = Flask(__name__)
app.mode = 'http'
@webapp.route('/')
def index():
if request.headers.get('X-Forwarded-For') != None:
app.config['client_ip'] = request.headers.get('X-Forwarded-For')
else:
app.config['client_ip'] = request.remote_addr
if request.headers.get('X-Real-Ip') != None:
app.config['client_ip'] = request.headers.get('X-Real-Ip')
return render_template('index.html', config=app.config)
@webapp.route('/api/')
@webapp.route('/api/<path:api_method>', methods=['POST', 'GET'])
def respond(api_method=None):
if request.headers.get('X-Forwarded-For') != None:
app.config['client_ip'] = request.headers.get('X-Forwarded-For')
else:
app.config['client_ip'] = request.remote_addr
if request.headers.get('X-Real-Ip') != None:
app.config['client_ip'] = request.headers.get('X-Real-Ip')
api_method = str(api_method).replace('/', '')
reason = f"API route {api_method} is not supported"
module_status = False
result = None
request_method = "Unknown"
if api_method != None and api_method in dir(api):
reason = f"API route: {api_method}"
data_pass = {}
if request.method == 'POST':
request_method = 'POST'
if request.headers.get('Content-type') != None and request.headers.get('Content-type').startswith('application/json'):
request_method = 'POST-JSON'
data_pass = request.get_json()
else:
data_pass = request.form
else:
request_method = 'GET'
data_pass = request.args
data_pass = dict(data_pass)
logged = auth.authorization_process(api_method)
result = logged
app.config['user'] = result
if logged['status'] == True:
# Start api request passing
module_status = True
if api_method != 'login':
result = getattr(api, api_method)(data_pass)
res = json.dumps({
'api': f"{app.config['full_name']} REST api 1.0",
'module_status': module_status,
'request_method': request_method,
'reason': reason,
'result': result
})
return Response(res, mimetype='application/json')
@webapp.route('/resource/')
@webapp.route('/resource/<path:resource_name>')
def get_resource(resource_name=None):
    """Serve a file from the configured resources directory by name.

    BUG FIX: the original converted with str() *before* the None check, so
    ``resource_name != None`` was always true and the "Resource not defined."
    branch was unreachable (the no-name route tried to serve a file literally
    named 'None').
    """
    if resource_name is None:
        return Response("Resource not defined.", mimetype='text/plain')
    # Strip path separators so only direct children of the directory resolve.
    resource_name = str(resource_name).replace('/', '')
    resource_dir = app.config['filesystem']['resources'].replace('/', os.path.sep)
    try:
        return send_from_directory(directory=resource_dir, path=resource_name)
    except Exception:
        return Response(f"Resource {resource_name} not found.", mimetype='text/plain')
if __name__ == '__main__':
    # One log file per month under storage/logs.
    today = datetime.date.today()
    logging.basicConfig(
        filename=f"storage/logs/ctrl-server-{today.strftime('%Y-%m')}.log",
        level=logging.INFO,
        format='%(asctime)s %(message)s')
    # Open, any host allowed.
    # NOTE(review): debug=True on 0.0.0.0 exposes the Werkzeug debugger to the
    # network — disable debug in production.
    # BUG FIX: port must be an int (a string port fails in newer Werkzeug).
    webapp.run(debug=True, host='0.0.0.0', port=5007)
    # Secure, only localhost allowed
    # webapp.run(debug=True, host='127.0.0.1', port='5007')
| 31.053097 | 130 | 0.634939 | 447 | 3,509 | 4.829978 | 0.297539 | 0.045855 | 0.07874 | 0.066698 | 0.252895 | 0.226957 | 0.17786 | 0.17786 | 0.17786 | 0.17786 | 0 | 0.007302 | 0.219436 | 3,509 | 112 | 131 | 31.330357 | 0.780942 | 0.037903 | 0 | 0.177215 | 0 | 0 | 0.199644 | 0.038861 | 0 | 0 | 0 | 0 | 0 | 1 | 0.037975 | false | 0.075949 | 0.101266 | 0 | 0.202532 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
2373aa9fe773ff2b3b5a27d1f60285a4da55ba1c | 257 | py | Python | dcorm/ordering.py | homeinfogmbh/dcorm | 7dc19259dad5252afea4b574a533240506b524d1 | [
"MIT"
] | 2 | 2021-12-23T13:49:26.000Z | 2021-12-23T15:16:46.000Z | dcorm/ordering.py | homeinfogmbh/dcorm | 7dc19259dad5252afea4b574a533240506b524d1 | [
"MIT"
] | null | null | null | dcorm/ordering.py | homeinfogmbh/dcorm | 7dc19259dad5252afea4b574a533240506b524d1 | [
"MIT"
] | null | null | null | """Ordering type definition."""
from enum import Enum
from dcorm.literal import Literal
__all__ = ['Ordering']
class Ordering(Enum):
    """Available orderings."""

    # ``space_left=True`` renders each literal with a leading space so it can
    # be appended directly after a preceding expression.
    ASC = Literal('ASC', space_left=True)
    DESC = Literal('DESC', space_left=True)
| 16.0625 | 43 | 0.680934 | 31 | 257 | 5.451613 | 0.548387 | 0.106509 | 0.153846 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.175097 | 257 | 15 | 44 | 17.133333 | 0.79717 | 0.178988 | 0 | 0 | 0 | 0 | 0.075 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
237bb11de5d3c4f72ec5ee838e1abe1a40edea11 | 1,431 | py | Python | LeetCode/Problems/15. 3Sum.py | nikku1234/Code-Practise | 94eb6680ea36efd10856c377000219285f77e5a4 | [
"Apache-2.0"
] | 9 | 2020-07-02T06:06:17.000Z | 2022-02-26T11:08:09.000Z | LeetCode/Problems/15. 3Sum.py | nikku1234/Code-Practise | 94eb6680ea36efd10856c377000219285f77e5a4 | [
"Apache-2.0"
] | 1 | 2021-11-04T17:26:36.000Z | 2021-11-04T17:26:36.000Z | LeetCode/Problems/15. 3Sum.py | nikku1234/Code-Practise | 94eb6680ea36efd10856c377000219285f77e5a4 | [
"Apache-2.0"
] | 8 | 2021-01-31T10:31:12.000Z | 2022-03-13T09:15:55.000Z | class Solution(object):
def threeSum(self, nums):
    """Return all unique triplets in *nums* that sum to zero.

    :type nums: List[int]
    :rtype: List[List[int]]

    Sorts the input in place, then for each anchor value sweeps a
    left/right pointer pair over the remainder: O(n^2) time, O(1) extra
    space.  (The previous commented-out O(n^2 * n) membership-scan version
    exceeded the time limit and has been removed.)
    """
    nums.sort()
    triplets = []
    for i, anchor in enumerate(nums):
        # Same anchor as the previous pass: all its triplets already found.
        if i > 0 and anchor == nums[i - 1]:
            continue
        lo, hi = i + 1, len(nums) - 1
        while lo < hi:
            total = anchor + nums[lo] + nums[hi]
            if total > 0:
                hi -= 1
            elif total < 0:
                lo += 1
            else:
                triplets.append([anchor, nums[lo], nums[hi]])
                lo += 1
                # Skip duplicates of the left value to avoid repeated triplets.
                while lo < hi and nums[lo] == nums[lo - 1]:
                    lo += 1
    return triplets
| 29.8125 | 85 | 0.386443 | 168 | 1,431 | 3.291667 | 0.345238 | 0.050633 | 0.048825 | 0.03255 | 0.090416 | 0 | 0 | 0 | 0 | 0 | 0 | 0.02809 | 0.502446 | 1,431 | 47 | 86 | 30.446809 | 0.748596 | 0.467505 | 0 | 0.15 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0 | 0 | 0.15 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
237c54e9431e8d639d8402119d1c8b345898e864 | 344 | py | Python | common/services/notice/NewsService.py | linkgeek/python_flask_cms | ff5e794b5b11075670e5d11a8cbda0a137319876 | [
"BSD-3-Clause"
] | 1 | 2022-03-20T06:34:48.000Z | 2022-03-20T06:34:48.000Z | common/services/notice/NewsService.py | linkgeek/python_flask_cms | ff5e794b5b11075670e5d11a8cbda0a137319876 | [
"BSD-3-Clause"
] | null | null | null | common/services/notice/NewsService.py | linkgeek/python_flask_cms | ff5e794b5b11075670e5d11a8cbda0a137319876 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from common.models.notice.UserNews import UserNews
from common.services.BaseService import BaseService
from application import db
class NewsService(BaseService):
    """Service-layer helpers for user news records."""

    @staticmethod
    def addNews(params):
        """Create and commit a UserNews row from *params* (a dict of fields)."""
        record = UserNews(**params)
        db.session.add(record)
        db.session.commit()
| 26.461538 | 51 | 0.723837 | 41 | 344 | 5.97561 | 0.609756 | 0.081633 | 0.106122 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003534 | 0.177326 | 344 | 12 | 52 | 28.666667 | 0.862191 | 0.061047 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.333333 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
237cbc5cdee57fecd08c8c4054e80ead984b6cca | 423 | py | Python | examples/garbage.py | cyberbeast/pympler | 0e02c8c72c1eef22beb8c18d6de19c128c4904ae | [
"Apache-2.0"
] | 862 | 2015-01-06T03:25:46.000Z | 2022-03-29T08:59:56.000Z | examples/garbage.py | cyberbeast/pympler | 0e02c8c72c1eef22beb8c18d6de19c128c4904ae | [
"Apache-2.0"
] | 119 | 2015-01-19T15:20:00.000Z | 2022-03-14T16:57:44.000Z | examples/garbage.py | cyberbeast/pympler | 0e02c8c72c1eef22beb8c18d6de19c128c4904ae | [
"Apache-2.0"
] | 80 | 2015-05-16T18:19:28.000Z | 2022-02-19T17:39:36.000Z |
from pympler.garbagegraph import start_debug_garbage
from pympler import web
class Leaf(object):
    """Terminal node of the demo object graph; carries no state of its own."""
class Branch(object):
    """Middle node: keeps a back-reference to its root and owns one Leaf."""

    def __init__(self, root):
        # The back-reference to ``root`` is what makes the graph cyclic,
        # producing garbage for pympler to inspect.
        self.root = root
        self.leaf = Leaf()
class Root(object):
    """Owns *num_branches* Branch children, each pointing back at this Root."""

    def __init__(self, num_branches):
        self.branches = [Branch(self) for _ in range(num_branches)]
# Enable garbage debugging before creating the cyclic structure.
start_debug_garbage()
# Build a small cyclic object graph, then drop the only external reference
# so it becomes collectable garbage.
tree = Root(2)
del tree
# Serve pympler's web profiler UI to inspect the tracked garbage.
web.start_profiler(debug=True)
| 16.269231 | 67 | 0.6974 | 58 | 423 | 4.810345 | 0.465517 | 0.078853 | 0.121864 | 0.121864 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.002976 | 0.205674 | 423 | 25 | 68 | 16.92 | 0.827381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0.066667 | 0.133333 | 0 | 0.466667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
88c6dd6dace65e1ab9ab3ed2719d7832543eaf9d | 2,858 | py | Python | STS.py | RafalKucharskiPK/PTVVisum_Python_Snippets | 08700ac9ff8dd8d0db01ed9b65550a15105cff17 | [
"MIT"
] | 3 | 2020-02-14T19:43:23.000Z | 2021-04-26T06:39:33.000Z | STS.py | RafalKucharskiPK/PTVVisum_Python_Snippets | 08700ac9ff8dd8d0db01ed9b65550a15105cff17 | [
"MIT"
] | null | null | null | STS.py | RafalKucharskiPK/PTVVisum_Python_Snippets | 08700ac9ff8dd8d0db01ed9b65550a15105cff17 | [
"MIT"
] | 2 | 2020-05-03T13:53:05.000Z | 2020-10-13T17:11:02.000Z | import sqlite3
def VisumInit(path=None, COMAddress='Visum.Visum.125'):
    """Start a PTV Visum COM instance, optionally loading a version file.

    ###
    Automatic Plate Number Recognition Support
    (c) 2012 Rafal Kucharski info@intelligent-infrastructure.eu
    ####

    :param path: optional .ver file to load into the new instance
    :param COMAddress: COM ProgID of the Visum release to start
    :return: the Visum COM automation object
    """
    import win32com.client
    Visum = win32com.client.Dispatch(COMAddress)
    if path is not None:  # idiomatic identity check (was ``!= None``)
        Visum.LoadVersion(path)
    return Visum
def Init_DB():
    """Create the in-memory SQLite schema for stops, lines, routes and times.

    Returns ``(connection, cursor)`` with foreign-key enforcement enabled.
    """
    def DB_Architecture(con, cur):
        # One row per stop point / line; Routes links them with an ordering.
        cur.execute("""create table Stops(Id_S INTEGER PRIMARY KEY)""")
        cur.execute("""create table Lines(Id_L INTEGER PRIMARY KEY AUTOINCREMENT)""")
        cur.execute("""create table Routes(Id_R INTEGER PRIMARY KEY AUTOINCREMENT,
                        Id_L INTEGER,
                        Ind INTEGER,
                        ID_S INTEGER,
                        FOREIGN KEY (Id_L) REFERENCES Lines(Id_L),
                        FOREIGN KEY (Id_S) REFERENCES Stops(Id_S)
                        )""")
        # BUG FIX: original DDL read "To_Stop, Integer," (declaring an untyped
        # To_Stop column plus a bogus "Integer" column) and was missing the
        # comma between the two FOREIGN KEY clauses, making it invalid SQL.
        cur.execute("""create table Times(From_Stop INTEGER,
                        To_Stop INTEGER,
                        FOREIGN KEY (From_Stop) REFERENCES Stops(Id_S),
                        FOREIGN KEY (To_Stop) REFERENCES Stops(Id_S)
                        )""")
        return con, cur

    con = sqlite3.connect(":memory:")
    cur = con.cursor()
    cur.execute('pragma foreign_keys=ON')
    con.text_factory = str
    con.commit()
    con, cur = DB_Architecture(con, cur)
    return con, cur
def DB_Input(con,cur):
StopPoints = Visum.Net.StopPoints.GetMultipleAttributes(['NO'])
StopPoints=[[int(s[0])] for s in StopPoints]
cur.executemany('insert into Stops(ID_S) values (?)', StopPoints)
con.commit()
LR2DB=[]
LineRoutes=Visum.Net.LineRoutes.GetMultipleAttributes(["Concatenate:Stoppoints\No","Concatenate:Timeprofiles\Concatenate:Timeprofileitems\Preruntime"])
Lines=[]
for i,LR in enumerate(LineRoutes):
Lines.append([i])
Stops=LR[0].split(":")
Stops=[int(s) for s in Stops]
for j,Stop in enumerate(Stops):
LR2DB.append([i,j,Stop])
print Lines
cur.executemany('insert into Lines(ID_L) values (?)', tuple(Lines))
cur.executemany('insert into Routes(ID_L,Ind,ID_S) values (?,?,?)', LR2DB)
con.commit()
# Attach to Visum and load the version file from disk.
Visum=VisumInit("E:\KA.ver")
#
#StopsPoints=[int(s[1]) for s in Visum.Net.Stops.GetMultiAttValues("No")]
#LineRoutes=Visum.Net.LineRoutes.GetMultipleAttributes(["Concatenate:Stoppoints\No","Concatenate:Timeprofiles\Concatenate:Timeprofileitems\Preruntime"])
# Build the in-memory SQLite schema and populate it from the Visum network.
con,cur=Init_DB()
DB_Input(con,cur)
| 36.177215 | 156 | 0.555983 | 303 | 2,858 | 5.155116 | 0.320132 | 0.03073 | 0.025608 | 0.053777 | 0.234315 | 0.169014 | 0.169014 | 0.169014 | 0.169014 | 0.169014 | 0 | 0.009927 | 0.330301 | 2,858 | 78 | 157 | 36.641026 | 0.806165 | 0.078027 | 0 | 0.137255 | 0 | 0 | 0.47386 | 0.046006 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.039216 | null | null | 0.019608 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
88c8b86680eadd3cc826848a5b6787cc6bfcac47 | 1,945 | py | Python | src_aryan/bot1/scripts/ball_detection.py | iamprasann/UMIC_TEAM4-Final | b9fc6d6ad251baaaab547e03946fa4252b40e9d8 | [
"MIT"
] | null | null | null | src_aryan/bot1/scripts/ball_detection.py | iamprasann/UMIC_TEAM4-Final | b9fc6d6ad251baaaab547e03946fa4252b40e9d8 | [
"MIT"
] | null | null | null | src_aryan/bot1/scripts/ball_detection.py | iamprasann/UMIC_TEAM4-Final | b9fc6d6ad251baaaab547e03946fa4252b40e9d8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
## Simple talker demo that listens to std_msgs/Strings published
## to the 'chatter' topic
import rospy
import cv2
import numpy as np
from std_msgs.msg import String
import matplotlib.pyplot as plt
from sensor_msgs.msg import Image
import cv2
from cv_bridge import CvBridge, CvBridgeError
import time
# Single shared converter between ROS image messages and OpenCV arrays.
bridge = CvBridge()
# Minimum contour area threshold (NOTE(review): the name shadows the builtin
# ``min``; renaming would require touching classifier() as well).
min=30
# BGR colour bounds: red is index [0], green is index [1].
color_boundaries = [ ([0, 0, 220], [30, 30, 255]), ([0, 220, 0], [30, 255, 30])]
def image_callback(img_msg):
    """ROS image callback: convert to an RGB 400x400 frame and classify it."""
    rospy.loginfo(img_msg.header)
    try:
        img = bridge.imgmsg_to_cv2(img_msg, "passthrough")
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        img = cv2.resize(img, (400, 400))
        classifier(img)
    # BUG FIX: was the Python-2-only ``except CvBridgeError, e`` syntax.
    except CvBridgeError as e:
        rospy.logerr("CvBridge Error: {0}".format(e))
def classifier(img):
    """Detect red and green blobs in *img* and print an action per large one."""
    masked = []
    for lower, upper in color_boundaries:
        lower = np.array(lower, dtype='uint8')
        upper = np.array(upper, dtype='uint8')
        in_range = cv2.inRange(img, lower, upper)
        masked.append(cv2.bitwise_and(img, img, mask=in_range))

    red_zone = cv2.cvtColor(masked[0], cv2.COLOR_BGR2GRAY)
    green_zone = cv2.cvtColor(masked[1], cv2.COLOR_BGR2GRAY)
    _, red_zone = cv2.threshold(red_zone, 20, 255, cv2.THRESH_BINARY)
    _, green_zone = cv2.threshold(green_zone, 20, 255, cv2.THRESH_BINARY)
    _, contours_red, hierarchy = cv2.findContours(red_zone, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    _, contours_green, hierarchy = cv2.findContours(green_zone, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)

    # ``min`` here is the module-level minimum-area constant (shadows builtin).
    for cnt in contours_red:
        if cv2.contourArea(cnt) > 2 * min:
            print("Push the Red!")
    for cnt in contours_green:
        if cv2.contourArea(cnt) > 2 * min:
            print("Grab the Green")
def listener():
    """Start the ball_detection node, subscribe to the camera feed and spin."""
    rospy.init_node('ball_detection', anonymous=True)
    rospy.Subscriber("/mybot/camera1/image_raw", Image, image_callback)
    rospy.spin()


### RUN ###
if __name__ == '__main__':
    listener()
88d4c79e58d07bb5e8ea6e1b3c253f42c56a36ef | 7,473 | py | Python | demo/lazy_numpy.py | markflorisson/minivect | d9a3743bea078382e9b16f54933bef3ba3a9d03c | [
"BSD-2-Clause"
] | 4 | 2015-10-02T18:25:24.000Z | 2022-02-12T16:09:11.000Z | demo/lazy_numpy.py | markflorisson/minivect | d9a3743bea078382e9b16f54933bef3ba3a9d03c | [
"BSD-2-Clause"
] | null | null | null | demo/lazy_numpy.py | markflorisson/minivect | d9a3743bea078382e9b16f54933bef3ba3a9d03c | [
"BSD-2-Clause"
] | null | null | null | """
Minimal library for lazy evaluation with NumPy. Uses minivect's LLVM backend
for evaluation.
"""
import sys
import time
import numpy as np
import miniast
import specializers
import minitypes
import codegen
import xmldumper
import treepath
from ctypes_conversion import get_data_pointer, convert_to_ctypes, get_pointer
context_debug = 0


class LazyLLVMContext(miniast.LLVMContext):
    """LLVM minivect context with a helper for stride-pointer variables."""

    debug = context_debug

    def stridesvar(self, variable):
        """Return a stride-pointer AST node for *variable*."""
        return miniast.StridePointer(
            self.pos, minitypes.NPyIntp.pointer(), variable)


# Shared LLVM context and AST builder used by every lazy expression.
context = LazyLLVMContext()
b = context.astbuilder

# Secondary C-codegen context (used only for printing generated C source).
ccontext = miniast.CContext()
ccontext.debug = context_debug

# Monotonic counter used to give each compiled function a unique name.
func_counter = 0
def specialize(specializer_cls, ast, context=context):
    """Run *specializer_cls* over *ast*; return (specialized_ast, code_output)."""
    specializers = [specializer_cls]
    # BUG FIX: ``iter(...).next()`` is Python-2-only; ``next(iter(...))``
    # behaves identically on both Python 2.6+ and Python 3.
    result = next(iter(context.run(ast, specializers)))
    _, specialized_ast, _, code_output = result
    return specialized_ast, code_output
def specialize_c(specializer_cls, ast):
    """Specialize *ast* with the C context and return (specialized_ast, C impl)."""
    specialized, (proto, impl) = specialize(specializer_cls, ast, ccontext)
    return specialized, impl
class Lazy(object):
    """
    Base class for lazy objects. We do not build a minivect AST immediately
    since we need to support our operator overloads.
    """

    def __init__(self, parent=None):
        self.parent = parent

    def map(self):
        """Build and specialize a minivect function for this expression tree.

        Returns ``(ctypes_func, variables, specializer, llvm_func)``.
        """
        global func_counter
        assert self.parent is None

        variables = []
        body = self._map(variables)
        shapevar = b.variable(minitypes.NPyIntp().pointer(), 'shape')
        func = b.build_function(variables, body, 'lazy%d' % func_counter,
                                shapevar=shapevar)

        # TODO: select an appropriate specializer
        specializer = specializers.StridedCInnerContigSpecializer
        specialized_func, (llvm_func, ctypes_func) = specialize(specializer, func)
        func_counter += 1
        return ctypes_func, variables, specializer, llvm_func

    def getfunc(self):
        """Return ``(ctypes_func, args)`` ready for calling."""
        ctypes_func, variables, specializer, llvm_func = self.map()

        first_array = variables[0].value
        shape = first_array.shape
        # Every operand extent must equal the first operand's (or be 1,
        # i.e. broadcastable).
        for variable in variables:
            for dim, extent in enumerate(variable.value.shape):
                if extent != shape[dim] and extent != 1:
                    raise ValueError("Differing extents in dim %d (%s, %s)" %
                                     (dim, extent, shape[dim]))

        args = [first_array.ctypes.shape]
        for variable in variables:
            if not variable.type.is_array:
                raise NotImplementedError
            numpy_array = variable.value
            args.append(get_data_pointer(numpy_array, variable.type))
            # Strided specializers additionally need each operand's strides.
            if not specializer.is_contig_specializer:
                args.append(numpy_array.ctypes.strides)
        return ctypes_func, args

    def getpointer(self):
        """Return the raw pointer of the compiled LLVM function."""
        ctypes_func, variables, specializer, llvm_func = self.map()
        return get_pointer(context, llvm_func)

    def eval(self):
        """Compile and execute this expression, returning the call result."""
        func, args = self.getfunc()
        return func(*args)

    def __add__(self, other):
        return Binop("+", self, lazy_array(other))

    def __mul__(self, other):
        return Binop("*", self, lazy_array(other))

    def __div__(self, other):
        # Python 2 division hook (this module targets Python 2).
        return Binop("/", self, lazy_array(other))
class Binop(Lazy):
    """Lazy binary operation over two lazy operands."""

    def __init__(self, op, lhs, rhs):
        super(Binop, self).__init__()
        self.op = op
        self.lhs, self.rhs = lhs, rhs
        lhs.parent, rhs.parent = self, self

    def _map(self, variables):
        """Lower both operands and combine them into one minivect node."""
        left = self.lhs._map(variables)
        right = self.rhs._map(variables)
        # No type promotion is implemented: operand types must agree exactly.
        assert left.type == right.type, (self.op, left.type, right.type)
        if self.op == '=':
            return b.assign(left, right)
        return b.binop(left.type, self.op, left, right)
class LazyArray(Lazy):
    """Leaf of the lazy expression tree wrapping a concrete NumPy array."""

    def __init__(self, numpy_array):
        super(LazyArray, self).__init__()
        self.numpy_array = numpy_array

    def _map(self, variables):
        """Register this array as a minivect variable and return it."""
        minidtype = minitypes.map_dtype(self.numpy_array.dtype)
        array_type = minidtype[(slice(None),) * self.numpy_array.ndim]
        # Dimensions of extent 1 broadcast against the other operands.
        array_type.broadcasting = tuple(
            extent == 1 for extent in self.numpy_array.shape)
        variable = b.variable(array_type, 'op%d' % len(variables))
        variables.append(variable)
        variable.value = self.numpy_array
        return variable

    def __setitem__(self, item, value):
        # Only full-slice / Ellipsis assignment is supported.
        if item is not Ellipsis:
            if not isinstance(item, tuple):
                item = (item,)
            for s in item:
                if not isinstance(s, slice):
                    raise NotImplementedError("Only full slices are supported")
                elif s.start is not None or s.stop is not None or s.step is not None:
                    raise NotImplementedError("Only full slice assignment is supported")
        lazy_result = Binop('=', self, value)
        return lazy_result.eval()

    def slice_assign(self, src):
        """Compile ``self[...] = src`` and return (ctypes_func, args) uncalled."""
        lazy_result = Binop('=', self, src)
        t = time.time()
        ctypes_func, args = lazy_result.getfunc()
        t = time.time() - t
        # BUG FIX: was the Python-2-only ``print 'compilation time:', t``;
        # the %s form prints identical text and also parses on Python 3.
        print('compilation time: %s' % t)
        return ctypes_func, args
def lazy_array(numpy_array):
    """Wrap *numpy_array* in a LazyArray unless it is already a lazy object."""
    return numpy_array if isinstance(numpy_array, Lazy) else LazyArray(numpy_array)
def test():
    """
    >>> test()
    [[ 0. 2. 4. 6. 8. 10. 12. 14. 16. 18.]
    [ 20. 22. 24. 26. 28. 30. 32. 34. 36. 38.]
    [ 40. 42. 44. 46. 48. 50. 52. 54. 56. 58.]
    [ 60. 62. 64. 66. 68. 70. 72. 74. 76. 78.]
    [ 80. 82. 84. 86. 88. 90. 92. 94. 96. 98.]
    [ 100. 102. 104. 106. 108. 110. 112. 114. 116. 118.]
    [ 120. 122. 124. 126. 128. 130. 132. 134. 136. 138.]
    [ 140. 142. 144. 146. 148. 150. 152. 154. 156. 158.]
    [ 160. 162. 164. 166. 168. 170. 172. 174. 176. 178.]
    [ 180. 182. 184. 186. 188. 190. 192. 194. 196. 198.]]
    """
    # NOTE(review): the doctest output above appears whitespace-mangled
    # upstream; re-check its alignment against the actual numpy repr before
    # relying on doctest.
    a = np.arange(100, dtype=np.double).reshape(10, 10)
    lazy_a = lazy_array(a)
    # In-place doubling via the lazy-compiled kernel.
    lazy_a[:, :] = lazy_a + lazy_a
    # BUG FIX: was the Python-2-only ``print a`` statement (single-argument
    # print(a) emits identical output on Python 2 as well).
    print(a)
def test2():
    """Benchmark a NumPy elementwise product against the lazy-compiled kernel."""
    N = 200
    i, j, k = np.ogrid[:N, :N, :N]
    dtype = np.double
    i, j, k = i.astype(dtype), j.astype(dtype), k.astype(dtype)

    numpy_result = np.empty((N, N, N), dtype=dtype)
    t = time.time()
    numpy_result[...] = i * j * k
    # BUG FIX: was the Python-2-only ``print time.time() - t`` statement.
    print(time.time() - t)

    our_result = np.empty((N, N, N), dtype=dtype)

    # Lazy evaluation slice assignment
    # t = time.time()
    lazy_dst = lazy_array(our_result)
    lazy_i, lazy_j, lazy_k = lazy_array(i), lazy_array(j), lazy_array(k)
    # lazy_dst[...] = lazy_i * lazy_j * lazy_k
    # print time.time() - t
    #
    # assert np.all(numpy_result == our_result)

    # Lazy evaluation with compilation separate from timing.
    f, a = lazy_dst.slice_assign(lazy_i * lazy_j * lazy_k)
    t = time.time()
    # ``_`` instead of ``i`` so the loop does not shadow the ogrid array above.
    for _ in range(10):
        f(*a)
    print(time.time() - t)
    # assert np.all(numpy_result == our_result)
if __name__ == '__main__':
    # test()
    test2()
    # import doctest
    # doctest.testmod()
88d6c16bd034eb1de44ec24a0c0ef9b55a008104 | 1,332 | py | Python | lab4/alr/lexer.py | aslastin/ITMO-Translation-Methods-y2021 | 9c688e91b13ab537bb5a924ab50d21468ba69027 | [
"MIT"
] | null | null | null | lab4/alr/lexer.py | aslastin/ITMO-Translation-Methods-y2021 | 9c688e91b13ab537bb5a924ab50d21468ba69027 | [
"MIT"
] | null | null | null | lab4/alr/lexer.py | aslastin/ITMO-Translation-Methods-y2021 | 9c688e91b13ab537bb5a924ab50d21468ba69027 | [
"MIT"
] | null | null | null | import re
from alr.input_streams import InputStream
from alr.instances import Terminal, Token, END_TERMINAL_NAME
class LexerException(Exception):
    """Raised when no terminal matches the input at the current position."""
class Lexer:
    """Abstract tokenizer interface."""

    def next(self) -> Token:
        """Produce the next token; concrete lexers override this."""
        pass
def findMatchInfo(terminals: "list[Terminal]", data: str):
    """Return ``(match, terminal)`` for the first terminal whose regexp
    matches the start of *data*, or ``None`` when nothing matches.

    BUG FIX: the annotation was the list literal ``[Terminal]`` — not a valid
    type hint — and was evaluated eagerly at definition time; a string
    annotation is both correct (PEP 484 forward reference) and lazy.
    """
    for candidate in terminals:
        hit = re.match(candidate.regexp, data)
        if hit is not None:
            return hit, candidate
    return None
def lexer(terminals: "list[Terminal]", skip_terminals: "list[Terminal]",
          input_stream: "InputStream") -> "Iterator[Token]":
    """Tokenize *input_stream*, yielding Tokens and a final end-of-input Token.

    Terminals in *skip_terminals* (whitespace and the like) are consumed
    silently.  Raises LexerException when no terminal matches at the current
    position.

    BUG FIX: annotations were eagerly-evaluated list literals (``[Terminal]``)
    and ``-> Token`` — this generator actually yields Tokens, so the return
    hint is ``Iterator[Token]`` (as a lazy string annotation).
    """
    source = input_stream.source
    len_source = len(source)
    while not input_stream.is_consumed:
        data = input_stream.get()

        # Silently consume skippable input first.
        skip_info = findMatchInfo(skip_terminals, data)
        if skip_info:
            match, _ = skip_info
            input_stream.consume(match.end())
            continue

        match_info = findMatchInfo(terminals, data)
        if match_info:
            match, terminal = match_info
            end = match.end()
            yield Token(terminal, input_stream.index, input_stream.index + end, source)
            input_stream.consume(end)
            continue

        raise LexerException(f'pos: {input_stream.index}: Can not parse {data}')

    # Synthesized end-of-input terminal/token pair.
    endTerminal = Terminal(END_TERMINAL_NAME, len(terminals))
    yield Token(endTerminal, len_source, len_source, source)
| 30.272727 | 97 | 0.662162 | 155 | 1,332 | 5.516129 | 0.329032 | 0.115789 | 0.038596 | 0.042105 | 0.067836 | 0.067836 | 0 | 0 | 0 | 0 | 0 | 0 | 0.253754 | 1,332 | 43 | 98 | 30.976744 | 0.860161 | 0 | 0 | 0.176471 | 0 | 0 | 0.035285 | 0.015766 | 0 | 0 | 0 | 0 | 0 | 1 | 0.088235 | false | 0.058824 | 0.088235 | 0 | 0.294118 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
88d7be55caf170f63950978db5728c361f0fa0b1 | 1,253 | py | Python | internos/survey/migrations/0008_monitoringreporting.py | UNICEFLebanonInnovation/Staging-Neuro | aac1e4f335ff4ec32041f989a9c22f8581a4961a | [
"MIT"
] | null | null | null | internos/survey/migrations/0008_monitoringreporting.py | UNICEFLebanonInnovation/Staging-Neuro | aac1e4f335ff4ec32041f989a9c22f8581a4961a | [
"MIT"
] | null | null | null | internos/survey/migrations/0008_monitoringreporting.py | UNICEFLebanonInnovation/Staging-Neuro | aac1e4f335ff4ec32041f989a9c22f8581a4961a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2020-05-13 23:41
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import smart_selects.db_fields
class Migration(migrations.Migration):
    """Add the MonitoringReporting model to the survey app."""

    dependencies = [
        ('survey', '0007_auto_20200513_0150'),
    ]

    operations = [
        migrations.CreateModel(
            name='MonitoringReporting',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('reporting_date', models.DateField()),
                ('number', models.FloatField(blank=True, null=True)),
                ('source_text', models.CharField(blank=True, max_length=1500, null=True)),
                ('source_url', models.URLField(blank=True, max_length=1500, null=True)),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='survey.ItemCategory')),
                # Chained FK: the selectable items are filtered by the chosen
                # category (django-smart-selects).
                ('item', smart_selects.db_fields.ChainedForeignKey(blank=True, chained_field='category', chained_model_field='category', null=True, on_delete=django.db.models.deletion.CASCADE, to='survey.Item')),
            ],
        ),
    ]
| 41.766667 | 212 | 0.649641 | 141 | 1,253 | 5.588652 | 0.531915 | 0.040609 | 0.053299 | 0.083756 | 0.170051 | 0.170051 | 0.170051 | 0 | 0 | 0 | 0 | 0.04251 | 0.211492 | 1,253 | 29 | 213 | 43.206897 | 0.755061 | 0.055068 | 0 | 0 | 1 | 0 | 0.128704 | 0.019475 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.181818 | 0 | 0.318182 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
88dde821475b9b36f9a23a10e09e40041dcb1e93 | 2,610 | py | Python | dynsimf/test/test_model.py | Tensaiz/DyNSimF | 6288ff83f1b3f56fa626f741b55ade57b7c1b358 | [
"BSD-2-Clause"
] | 3 | 2020-11-04T08:52:33.000Z | 2021-01-27T22:27:07.000Z | dynsimf/test/test_model.py | Tensaiz/DyNSimF | 6288ff83f1b3f56fa626f741b55ade57b7c1b358 | [
"BSD-2-Clause"
] | 2 | 2020-11-13T07:49:33.000Z | 2020-11-16T08:22:13.000Z | dynsimf/test/test_model.py | Tensaiz/DyNSimF | 6288ff83f1b3f56fa626f741b55ade57b7c1b358 | [
"BSD-2-Clause"
] | null | null | null | import unittest
from dynsimf.models.Model import Model
from dynsimf.models.Model import ModelConfiguration
import networkx as nx
import numpy as np
__author__ = "Mathijs Maijer"
__email__ = "m.f.maijer@gmail.com"
class ModelTest(unittest.TestCase):
    """Unit tests for dynsimf Model construction and state updates."""

    @staticmethod
    def _make_model():
        """Build a Model over a fresh 10-node random geometric graph."""
        g = nx.random_geometric_graph(10, 0.1)
        return Model(g)

    def test_model_init(self):
        m = self._make_model()
        self.assertTrue(isinstance(m, Model))

    def test_model_constants(self):
        m = self._make_model()
        d = {1: 2}
        m.constants = d
        self.assertEqual(m.constants, d)

    def test_set_states(self):
        m = self._make_model()
        m.set_states(['1', '2'])
        # BUG FIX: was ``.any()``, which passes as soon as *any* element is
        # zero; ``.all()`` verifies the whole state matrix is zero-initialised.
        self.assertTrue((np.zeros((10, 2)) == m.node_states).all())
        self.assertEqual(m.state_names, ['1', '2'])
        self.assertEqual(m.state_map, {'1': 0, '2': 1})

    def _simulate_single_state(self, update_x):
        """Run one iteration of a one-state ('x') model using *update_x*."""
        m = self._make_model()
        m.set_states(['x'])
        m.add_update(update_x)
        m.set_initial_state({'x': 0})
        return m.simulate(1)

    def test_assign_constant(self):
        def update_x():
            return {'x': 1}

        output = self._simulate_single_state(update_x)
        self.assertEqual(list(output['states'][0]), list(np.zeros((10, 1))))
        self.assertEqual(list(output['states'][1]), list(np.ones((10, 1))))

    def test_assign_array(self):
        def update_x():
            return {'x': np.arange(10)}

        output = self._simulate_single_state(update_x)
        self.assertEqual(list(output['states'][0]), list(np.zeros((10, 1))))
        self.assertEqual(list(output['states'][1]), list(np.arange(10)))

    def test_assign_nodes(self):
        def update_x():
            return {'x': {0: 5, 5: 10}}

        output = self._simulate_single_state(update_x)
        # The original left its assertions commented out and only printed the
        # output; keep this as a smoke test that dict-based per-node
        # assignment runs, checking the (known) initial state.
        self.assertEqual(list(output['states'][0]), list(np.zeros((10, 1))))
| 26.363636 | 77 | 0.5659 | 348 | 2,610 | 4.071839 | 0.189655 | 0.095272 | 0.038109 | 0.076217 | 0.678193 | 0.638673 | 0.638673 | 0.638673 | 0.638673 | 0.638673 | 0 | 0.039142 | 0.285441 | 2,610 | 98 | 78 | 26.632653 | 0.720643 | 0.091954 | 0 | 0.530303 | 0 | 0 | 0.030919 | 0 | 0 | 0 | 0 | 0 | 0.136364 | 1 | 0.136364 | false | 0 | 0.075758 | 0.045455 | 0.272727 | 0.015152 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
88deab0a5f3046c5e43d3c7ac8bb2269ee606875 | 798 | py | Python | effdet/config/train_config.py | phager90/efficientdet-pytorch | bbd84c0e7ec2a23c6ae7447c437789524ba141dd | [
"Apache-2.0"
] | 1,386 | 2020-03-27T07:05:36.000Z | 2022-03-31T17:27:50.000Z | effdet/config/train_config.py | dmatos2012/efficientdet-pytorch | 301487e859fa8160dd3e01b7dbc54d713b392676 | [
"Apache-2.0"
] | 176 | 2020-03-27T07:07:36.000Z | 2022-03-15T19:49:53.000Z | effdet/config/train_config.py | dmatos2012/efficientdet-pytorch | 301487e859fa8160dd3e01b7dbc54d713b392676 | [
"Apache-2.0"
] | 276 | 2020-03-28T10:16:24.000Z | 2022-03-30T19:27:12.000Z | from omegaconf import OmegaConf
def default_detection_train_config():
    """Return the default detection training hyper-parameters as an OmegaConf node."""
    # FIXME currently using args for train config, will revisit, perhaps move to Hydra
    return OmegaConf.create({
        # dataset
        'skip_crowd_during_training': True,
        # augmentation
        'input_rand_hflip': True,
        'train_scale_min': 0.1,
        'train_scale_max': 2.0,
        'autoaugment_policy': None,
        # optimization
        'momentum': 0.9,
        'learning_rate': 0.08,
        'lr_warmup_init': 0.008,
        'lr_warmup_epoch': 1.0,
        'first_lr_drop_epoch': 200.0,
        'second_lr_drop_epoch': 250.0,
        'clip_gradients_norm': 10.0,
        'num_epochs': 300,
        # regularization l2 loss.
        'weight_decay': 4e-5,
        'lr_decay_method': 'cosine',
        'moving_average_decay': 0.9998,
        'ckpt_var_scope': None,
    })
| 22.8 | 86 | 0.684211 | 126 | 798 | 4.063492 | 0.626984 | 0.019531 | 0.042969 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.060855 | 0.238095 | 798 | 34 | 87 | 23.470588 | 0.78125 | 0.172932 | 0 | 0 | 0 | 0 | 0.009174 | 0 | 0 | 0 | 0 | 0.029412 | 0 | 1 | 0.047619 | false | 0 | 0.047619 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
88e1430895468040e8411cfbb8e3a4d3e807e9b5 | 343 | py | Python | CryptoHack Resources/Introduction-to-CryptoHack/great_snakes_35381fca29d68d8f3f25c9fa0a9026fb.py | ClutchKick2207/CryptoHack | c73cb47ae860420296ef200d506155c04505f5fe | [
"MIT"
] | null | null | null | CryptoHack Resources/Introduction-to-CryptoHack/great_snakes_35381fca29d68d8f3f25c9fa0a9026fb.py | ClutchKick2207/CryptoHack | c73cb47ae860420296ef200d506155c04505f5fe | [
"MIT"
] | null | null | null | CryptoHack Resources/Introduction-to-CryptoHack/great_snakes_35381fca29d68d8f3f25c9fa0a9026fb.py | ClutchKick2207/CryptoHack | c73cb47ae860420296ef200d506155c04505f5fe | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import sys
# import this
# Warn (but do not exit) when run under the unsupported Python 2.
if sys.version_info.major == 2:
    print("You are running Python 2, which is no longer supported. Please update to Python 3.")

ords = [81, 64, 75, 66, 70, 93, 73, 72, 1, 92, 109, 2, 84, 109, 66, 75, 70, 90, 2, 92, 79]

print("Here is your flag:")
# Each byte is XOR-ed with 0x32 to recover one flag character.
print(bytes(o ^ 0x32 for o in ords).decode())
| 26.384615 | 95 | 0.638484 | 66 | 343 | 3.30303 | 0.757576 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.174545 | 0.198251 | 343 | 12 | 96 | 28.583333 | 0.618182 | 0.09621 | 0 | 0 | 0 | 0 | 0.324675 | 0 | 0 | 0 | 0.012987 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0.5 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 |
88e8a829aa4fdf6e93f9029ece59967d4ebdae3e | 636 | py | Python | RNAPuzzles/rnapuzzles/views/faq/detail.py | whinyadventure/RNA-Puzzles | bbd147e1a0748a77b5e3424a93ad57bb430b5a0e | [
"Apache-2.0"
] | null | null | null | RNAPuzzles/rnapuzzles/views/faq/detail.py | whinyadventure/RNA-Puzzles | bbd147e1a0748a77b5e3424a93ad57bb430b5a0e | [
"Apache-2.0"
] | 26 | 2019-10-08T11:11:25.000Z | 2022-03-12T00:52:30.000Z | RNAPuzzles/rnapuzzles/views/faq/detail.py | whinyadventure/RNA-Puzzles | bbd147e1a0748a77b5e3424a93ad57bb430b5a0e | [
"Apache-2.0"
] | 1 | 2020-05-11T18:51:04.000Z | 2020-05-11T18:51:04.000Z | from django.http import Http404, HttpResponseRedirect
from django.urls import reverse
from django.views.generic import DetailView
from guardian.mixins import PermissionRequiredMixin
from rnapuzzles.models import NewsModel, FaqModel
class Detail(DetailView):
    """Detail page for a single FAQ entry; redirects to the list on 404."""

    model = FaqModel

    def get(self, request, *args, **kwargs):
        """Render the FAQ detail, falling back to the FAQ list if missing.

        BUG FIX: removed a leftover debug ``print(request)`` call.
        """
        try:
            self.object = self.get_object()
        except Http404:
            # Object gone or never existed: send the user back to the list.
            return HttpResponseRedirect(reverse("faq_list"))
        context = self.get_context_data(object=self.object)
        return self.render_to_response(context)
| 22.714286 | 60 | 0.701258 | 70 | 636 | 6.285714 | 0.585714 | 0.068182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012195 | 0.226415 | 636 | 27 | 61 | 23.555556 | 0.882114 | 0.02044 | 0 | 0 | 0 | 0 | 0.012945 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.066667 | false | 0 | 0.333333 | 0 | 0.666667 | 0.066667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
88e8f4b9f4625f3e3c6e8cae468da3b5352085b7 | 1,611 | py | Python | crate/web/lists/models.py | vijay2312/crate.web | dbf078485675ecd568e33a170d31b068949ec9bf | [
"BSD-2-Clause"
] | 1 | 2021-06-23T18:14:30.000Z | 2021-06-23T18:14:30.000Z | crate/web/lists/models.py | vijay2312/crate.web | dbf078485675ecd568e33a170d31b068949ec9bf | [
"BSD-2-Clause"
] | null | null | null | crate/web/lists/models.py | vijay2312/crate.web | dbf078485675ecd568e33a170d31b068949ec9bf | [
"BSD-2-Clause"
] | null | null | null | from django.core.urlresolvers import reverse
from django.db import models, IntegrityError
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _
from model_utils.models import TimeStampedModel
class List(TimeStampedModel):
    """A user-owned, optionally private, named collection of packages."""

    user = models.ForeignKey("auth.User")
    # Translators: This is used to allow naming a specific list of packages.
    name = models.CharField(_("Name"), max_length=50, db_index=True)
    # URL-safe identifier; derived from ``name`` in save() when left blank.
    slug = models.SlugField(max_length=50)
    description = models.CharField(max_length=250, blank=True)
    private = models.BooleanField(_("Private List"), default=False, help_text=_("Private lists are visible only to you."))
    packages = models.ManyToManyField("packages.Package", verbose_name=_("Packages"))

    class Meta:
        # A user cannot own two lists with the same name, nor the same slug.
        unique_together = [
            ("user", "name"),
            ("user", "slug"),
        ]

    def save(self, *args, **kwargs):
        """Persist the list, deriving a per-user-unique slug from the name.

        Raises IntegrityError when ``name`` is empty.
        """
        if not self.name:
            raise IntegrityError("Name cannot be empty")
        if not self.slug:
            slug = slugify(self.name)
            i = 1
            # Append an increasing counter until the slug is unique for this user.
            while List.objects.filter(user=self.user, slug=slug).exists():
                slug = slugify(u"%s %s" % (self.name, i))
                i += 1
            self.slug = slug
        return super(List, self).save(*args, **kwargs)

    def __unicode__(self):
        # Python 2-style display name, e.g. "alice / favourites".
        return u"%(username)s / %(listname)s" % {"username": self.user.username, "listname": self.name}

    def get_absolute_url(self):
        return reverse("lists_detail", kwargs={"username": self.user.username, "slug": self.slug})
| 33.5625 | 122 | 0.645562 | 196 | 1,611 | 5.19898 | 0.469388 | 0.039254 | 0.02159 | 0.047105 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007252 | 0.229671 | 1,611 | 47 | 123 | 34.276596 | 0.81386 | 0.043451 | 0 | 0 | 0 | 0 | 0.126706 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.09375 | false | 0 | 0.15625 | 0.0625 | 0.59375 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
88ef565844fc93b1c848603e6e092f169990260a | 3,152 | py | Python | python/diodberg/renderers/serial_renderers.py | ikea-lisp-code/diodberg | 843a80959f9ba777f09345209abeaa5bcd3a7a16 | [
"MIT"
] | 5 | 2015-10-30T04:53:51.000Z | 2019-08-10T22:32:55.000Z | python/diodberg/renderers/serial_renderers.py | ikea-lisp-code/diodberg | 843a80959f9ba777f09345209abeaa5bcd3a7a16 | [
"MIT"
] | null | null | null | python/diodberg/renderers/serial_renderers.py | ikea-lisp-code/diodberg | 843a80959f9ba777f09345209abeaa5bcd3a7a16 | [
"MIT"
] | null | null | null | from diodberg.core.renderer import Renderer
import sys
try:
import serial
except ImportError as err:
sys.stderr.write("Error: failed to import module ({})".format(err))
class DMXSerialRenderer(Renderer):
    """ DMXSerialRenderer provides a renderer interface to a custom DMX shield
    using the RaspberryPi serial port.

    TODO: The baudrate on the Pi currently ceilings at 115200 baud. Change back to
    250000 baud when fixed on the Pi-side.
    """

    # DMX universe size and default channel value.
    __dmx_buffer_size = 512
    __default_channel_val = 0
    # Serial-port configuration for the Pi's UART.
    __device_name = "/dev/ttyAMA0"
    __baud_rateHz = 115200
    __timeout = 3.
    __bytesize = serial.EIGHTBITS
    __parity = serial.PARITY_NONE
    __stopbits = serial.STOPBITS_TWO

    __slots__ = {'__port', '__buffer'}

    def __init__(self, universes = 1):
        """Open the serial port and allocate one zeroed DMX buffer per universe."""
        super(DMXSerialRenderer, self).__init__()
        self.__port = serial.Serial(port = DMXSerialRenderer.__device_name)
        self.__port.baudrate = DMXSerialRenderer.__baud_rateHz
        self.__port.bytesize = DMXSerialRenderer.__bytesize
        self.__port.parity = DMXSerialRenderer.__parity
        self.__port.stopbits = DMXSerialRenderer.__stopbits
        self.__port.timeout = DMXSerialRenderer.__timeout
        # Initialize a shared storage buffer.
        default_buffer = [DMXSerialRenderer.__default_channel_val]*DMXSerialRenderer.__dmx_buffer_size
        self.__buffer = {}
        # Fix: use range() instead of the Python 2-only xrange(); identical
        # behavior on Python 2 for these small counts, and works on Python 3.
        for i in range(universes):
            self.__buffer[i] = bytearray(default_buffer)

    def render(self, panel):
        """Copy live pixel colors into the per-universe buffers, then transmit."""
        # Fill in the buffer.
        # NOTE(review): panel.iteritems() is assumed to be the project Panel
        # API rather than a plain dict -- confirm before a full Python 3 port.
        for loc, pixel in panel.iteritems():
            if pixel.live:
                universe = pixel.address.universe
                address = pixel.address.address
                self.__buffer[universe][address] = pixel.color.red
                self.__buffer[universe][address + 1] = pixel.color.green
                self.__buffer[universe][address + 2] = pixel.color.blue
        # Send the buffer over DMX.
        # Fix: dict.iteritems() is Python 2-only; items() works on both 2 and 3.
        for universe, buf in self.__buffer.items():
            self.send_dmx(universe, buf)

    def send_dmx(self, universe, buf):
        """ Sends the DMX packet over serial.

        The baudrate drop around the first zero byte presumably emulates the
        DMX break/mark-after-break preamble -- confirm on hardware.
        ``universe`` is accepted for interface symmetry but unused here.
        """
        self.__port.baudrate = DMXSerialRenderer.__baud_rateHz/2
        self.__port.write(chr(0))
        self.__port.baudrate = DMXSerialRenderer.__baud_rateHz
        self.__port.write(chr(0))
        self.__port.write(buf)

    def close(self):
        """ Close the serial port.
        """
        self.__port.close()

    def __del__(self):
        self.close()

    def __repr__(self):
        return "DMXSerialRenderer"
def pi_serial_main(num = 1):
    """Exercise serial DMX output by cycling hue on a randomly generated panel."""
    from diodberg.core.runner import Controller
    from diodberg.core.types import random_panel
    from diodberg.user_plugins.examples import CycleHue
    test_panel = random_panel(size = (num, 1), num_pixels = num, live = True)
    dmx_renderer = DMXSerialRenderer()
    hue_runner = CycleHue(test_panel, dmx_renderer, sleep = 1.)
    dmx_controller = Controller(test_panel, dmx_renderer)
    dmx_controller.run(hue_runner)


if __name__ == "__main__":
    pi_serial_main()
| 35.41573 | 102 | 0.658947 | 358 | 3,152 | 5.432961 | 0.349162 | 0.049357 | 0.024679 | 0.0509 | 0.092031 | 0.092031 | 0.069923 | 0.052442 | 0 | 0 | 0 | 0.013971 | 0.250635 | 3,152 | 88 | 103 | 35.818182 | 0.809483 | 0.137373 | 0 | 0.065574 | 0 | 0 | 0.03215 | 0 | 0 | 0 | 0 | 0.011364 | 0 | 1 | 0.114754 | false | 0 | 0.131148 | 0.016393 | 0.42623 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
88fb8dc0c3f47a6c1f40813c30af4b8782ab7ba2 | 1,249 | py | Python | nysa/cbuilder/sdb.py | CospanDesign/nysa | ffe07f0b8fe2f6217e7a862d89b80f1b17163be9 | [
"MIT"
] | 15 | 2015-08-31T20:50:39.000Z | 2022-03-13T08:56:39.000Z | nysa/cbuilder/sdb.py | CospanDesign/nysa | ffe07f0b8fe2f6217e7a862d89b80f1b17163be9 | [
"MIT"
] | 5 | 2015-05-02T16:48:57.000Z | 2017-06-15T16:25:34.000Z | nysa/cbuilder/sdb.py | CospanDesign/nysa | ffe07f0b8fe2f6217e7a862d89b80f1b17163be9 | [
"MIT"
] | 6 | 2016-09-02T16:02:13.000Z | 2021-06-29T22:29:45.000Z | #! /usr/bin/python
# Copyright (c) 2015 Dave McCoy (dave.mccoy@cospandesign.com)
#
# This file is part of Nysa.
# (http://wiki.cospandesign.com/index.php?title=Nysa.org)
#
# Nysa is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
#
# Nysa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Nysa; If not, see <http://www.gnu.org/licenses/>.
__author__ = "dave.mccoy@cospandesign.com (Dave McCoy)"
from array import array as Array
import collections
from nysa.common.status import Status
class SDBError(Exception):
    """Fatal error raised while processing SDB records."""
    pass
class SDBWarning(Exception):
    """Non-fatal warning condition raised while processing SDB records."""
    pass
class SDBInfo(Exception):
    """Informational condition raised while processing SDB records."""
    pass
class SDB(object):
    """Base container for SDB records.

    NOTE(review): ``ELEMENTS`` is not defined here; subclasses are expected to
    supply it as the list of field names -- confirm against subclass usage.
    """

    def __init__(self, status = None):
        # Status sink used for reporting; fall back to a fresh Status when absent.
        self.s = Status() if status is None else status
        # One empty-string entry per declared element name.
        self.d = {element: "" for element in self.ELEMENTS}
| 26.020833 | 70 | 0.702162 | 186 | 1,249 | 4.672043 | 0.543011 | 0.041427 | 0.044879 | 0.065593 | 0.094361 | 0.064442 | 0 | 0 | 0 | 0 | 0 | 0.005071 | 0.210568 | 1,249 | 47 | 71 | 26.574468 | 0.876268 | 0.582866 | 0 | 0.166667 | 0 | 0 | 0.079681 | 0.053785 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0.166667 | 0.166667 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
88fd8d9eeaa742b1676ada01bf8c212cd2db4417 | 3,543 | py | Python | DataIngestion_FeaturePreparation/utilFunctions.py | Glorf/SparkDLTrigger | 6082efd33d6f306fd00a900b80f55a6a3ca19054 | [
"Apache-2.0"
] | 24 | 2019-04-24T15:10:29.000Z | 2021-09-01T14:55:41.000Z | DataIngestion_FeaturePreparation/utilFunctions.py | Glorf/SparkDLTrigger | 6082efd33d6f306fd00a900b80f55a6a3ca19054 | [
"Apache-2.0"
] | null | null | null | DataIngestion_FeaturePreparation/utilFunctions.py | Glorf/SparkDLTrigger | 6082efd33d6f306fd00a900b80f55a6a3ca19054 | [
"Apache-2.0"
] | 11 | 2019-04-24T14:14:10.000Z | 2021-07-16T07:46:00.000Z | import numpy as np
import math
from pyspark.sql import Row
"""
Implementation of Lorentz vector
"""
class LorentzVector(object):
    """Minimal pure-Python port of ROOT's TLorentzVector (x, y, z, t)."""

    def __init__(self, *args):
        # With four positional args, initialize components directly; with no
        # args the components stay unset until a Set* method is called.
        if len(args) > 0:
            self.x = args[0]
            self.y = args[1]
            self.z = args[2]
            self.t = args[3]

    def SetPtEtaPhiM(self, pt, eta, phi, mass):
        """Set components from transverse momentum, pseudorapidity, phi, mass."""
        pt = abs(pt)
        self.SetPxPyPzM_impl = None  # no-op placeholder removed; see SetXYZM
        self.SetXYZM(pt*math.cos(phi), pt*math.sin(phi), pt*math.sinh(eta), mass)

    def SetXYZM(self, x, y, z, m):
        """Set Cartesian momentum components and derive the energy from mass m."""
        self.x = x
        self.y = y
        self.z = z
        if m >= 0:
            self.t = math.sqrt(x*x + y*y + z*z + m*m)
        else:
            # Negative mass: clamp at zero, matching TLorentzVector's behavior.
            self.t = math.sqrt(max(x*x + y*y + z*z - m*m, 0))

    def E(self):
        return self.t

    def Px(self):
        return self.x

    def Py(self):
        return self.y

    def Pz(self):
        return self.z

    def Pt(self):
        """Transverse momentum sqrt(px^2 + py^2)."""
        return math.sqrt(self.x*self.x + self.y*self.y)

    def Eta(self):
        """Pseudorapidity.

        Bug fix: the original fell off the end (returning None) for vectors on
        the z axis with z != 0. Follow ROOT TVector3::PseudoRapidity and return
        a large sentinel (+/-10e10) in that case; 0 when the vector is null.
        """
        cosTheta = self.CosTheta()
        if cosTheta*cosTheta < 1:
            return -0.5*math.log((1.0 - cosTheta)/(1.0 + cosTheta))
        if self.z == 0:
            return 0
        return 10e10 if self.z > 0 else -10e10

    def mag(self):
        """Magnitude of the 3-vector part."""
        return math.sqrt(self.x*self.x + self.y*self.y + self.z*self.z)

    def CosTheta(self):
        # A null 3-vector is treated as pointing along +z.
        return 1.0 if self.mag() == 0.0 else self.z/self.mag()

    def Phi(self):
        """Azimuthal angle in (-pi, pi]."""
        return math.atan2(self.y, self.x)

    def DeltaR(self, other):
        """Angular distance sqrt(deta^2 + dphi^2), with dphi wrapped into (-pi, pi]."""
        deta = self.Eta() - other.Eta()
        dphi = self.Phi() - other.Phi()
        pi = math.pi
        while dphi > pi: dphi -= 2*pi
        while dphi < -pi: dphi += 2*pi
        return math.sqrt(deta*deta + dphi*dphi)
"""
Functions used to return the Pt map of selected tracks, neutrals and photons
"""
def ChPtMapp(DR, event):
    """Return an (N, 3) array of [eta, phi, pT] for tracks with pT > 0.5.

    ``DR`` is accepted for interface compatibility but unused here.
    """
    rows = [[trk.Eta, trk.Phi, trk.PT] for trk in event.EFlowTrack if trk.PT > 0.5]
    return np.asarray(rows)
def NeuPtMapp(DR, event):
    """Return an (N, 3) array of [eta, phi, ET] for neutral hadrons with ET > 1.0.

    ``DR`` is accepted for interface compatibility but unused here.
    """
    rows = [[had.Eta, had.Phi, had.ET] for had in event.EFlowNeutralHadron if had.ET > 1.0]
    return np.asarray(rows)
def PhotonPtMapp(DR, event):
    """Return an (N, 3) array of [eta, phi, ET] for photons with ET > 1.0.

    ``DR`` is accepted for interface compatibility but unused here.
    """
    rows = [[ph.Eta, ph.Phi, ph.ET] for ph in event.EFlowPhoton if ph.ET > 1.0]
    return np.asarray(rows)
"""
Functions used to return the Pt map of selected tracks, neutrals and photons
Versions used for the optimized filtering with Spark SQL and HOF
"""
# Optimized variant: tracks arrive pre-selected as dict-like rows.
def ChPtMapp2(Tracks):
    """Return an (N, 3) array of [eta, phi, pT] for pre-selected track rows."""
    out = np.zeros((len(Tracks), 3))
    for row, trk in zip(out, Tracks):
        row[:] = (trk["Eta"], trk["Phi"], trk["PT"])
    return out
# Optimized variant: neutral hadrons arrive pre-selected as dict-like rows.
def NeuPtMapp2(NeutralHadrons):
    """Return an (N, 3) array of [eta, phi, ET] for pre-selected neutral rows."""
    out = np.zeros((len(NeutralHadrons), 3))
    for row, had in zip(out, NeutralHadrons):
        row[:] = (had["Eta"], had["Phi"], had["ET"])
    return out
# Optimized variant: photons arrive pre-selected as dict-like rows.
def PhotonPtMapp2(Photons):
    """Return an (N, 3) array of [eta, phi, ET] for pre-selected photon rows."""
    out = np.zeros((len(Photons), 3))
    for row, ph in zip(out, Photons):
        row[:] = (ph["Eta"], ph["Phi"], ph["ET"])
    return out
"""
Get the particle ISO
"""
def PFIso(p, DR, PtMap, subtractPt):
    """Particle-flow isolation: summed pT of PtMap entries within DR of p,
    divided by p's pT. With ``subtractPt`` the particle's own contribution
    (ratio 1) is subtracted.

    NOTE(review): the phi wrap threshold here is 2*pi, not pi -- kept as-is.
    """
    particle_pt = p.Pt()
    if particle_pt <= 0.:
        return 0.
    d_eta = PtMap[:, 0] - p.Eta()
    d_phi = PtMap[:, 1] - p.Phi()
    full_turn = 2. * math.pi
    d_phi = d_phi - full_turn * (d_phi > full_turn) + full_turn * (d_phi < -1. * full_turn)
    in_cone = d_phi * d_phi + d_eta * d_eta < DR * DR
    iso = PtMap[in_cone, 2].sum() / particle_pt
    if subtractPt:
        iso = iso - 1
    return float(iso)
| 26.639098 | 82 | 0.554332 | 528 | 3,543 | 3.712121 | 0.210227 | 0.022959 | 0.015306 | 0.02449 | 0.338776 | 0.314796 | 0.288776 | 0.218367 | 0.195918 | 0.195918 | 0 | 0.017221 | 0.29523 | 3,543 | 132 | 83 | 26.840909 | 0.767721 | 0.023709 | 0 | 0.163043 | 0 | 0 | 0.007612 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.217391 | false | 0 | 0.032609 | 0.086957 | 0.445652 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
88fdebb967418d6eb4f2c2f57edfe2ef72a2a5b2 | 15,565 | py | Python | gym_pybullet_drones/envs/single_agent_rl/BaseSingleAgentAviary.py | ramonfontes/gym-pybullet-drones | 7fc1dbd3d52086972775fe0143866f7706f7e5bb | [
"MIT"
] | null | null | null | gym_pybullet_drones/envs/single_agent_rl/BaseSingleAgentAviary.py | ramonfontes/gym-pybullet-drones | 7fc1dbd3d52086972775fe0143866f7706f7e5bb | [
"MIT"
] | null | null | null | gym_pybullet_drones/envs/single_agent_rl/BaseSingleAgentAviary.py | ramonfontes/gym-pybullet-drones | 7fc1dbd3d52086972775fe0143866f7706f7e5bb | [
"MIT"
] | null | null | null | import os
import numpy as np
from scipy.optimize import nnls
from gym import spaces
import pybullet as p
import pybullet_data
from gym_pybullet_drones.envs.BaseAviary import DroneModel, Physics, ImageType, BaseAviary
######################################################################################################################################################
#### Single drone environment class for reinforcement learning applications (in this implementation, taking off from the origin) #####################
######################################################################################################################################################
class BaseSingleAgentAviary(BaseAviary):
    """Single-drone PyBullet aviary for reinforcement-learning applications.

    Only ``num_drones=1`` is accepted. Depending on constructor flags the
    environment uses either RGB-image observations (``img_obs=True``) or a
    normalized kinematic state vector, and interprets actions either as
    thrust/torques (``dyn_input=True``) or as normalized RPMs.

    NOTE(review): several methods contain "##### REMOVE" debug blocks where
    the full 4-dim action / 20-dim observation handling is commented out and
    replaced by reduced 1-/2-dim versions -- this looks like a temporary
    experimental state of the repository; confirm before relying on the
    advertised spaces.
    """

    def __init__(self, drone_model: DroneModel=DroneModel.CF2X, num_drones: int=1,
                 neighbourhood_radius: float=np.inf, initial_xyzs=None, initial_rpys=None,
                 physics: Physics=Physics.PYB, freq: int=240, aggregate_phy_steps: int=1,
                 gui=False, record=False, obstacles=True, user_debug_gui=True, img_obs=False, dyn_input=False):
        """Initialize the environment.

        Args:
            drone_model: desired drone type (associated to an .urdf file).
            num_drones: desired number of drones; must be 1 for this class.
            neighbourhood_radius: used to compute the drones' adjacency matrix, in meters.
            initial_xyzs: (3,1) array, initial XYZ position of the drone.
            initial_rpys: (3,1) array, initial orientation of the drone (radians).
            physics: desired implementation of physics/dynamics.
            freq: frequency (Hz) at which the physics engine advances.
            aggregate_phy_steps: number of physics updates within one call of .step().
            gui: whether to use PyBullet's GUI.
            record: whether to save a video of the simulation.
            obstacles: whether to add obstacles to the simulation.
            user_debug_gui: whether to draw the drones' axes and the GUI sliders.
            img_obs: whether observations are camera images instead of state vectors.
            dyn_input: whether actions are thrust/torques instead of RPMs.
        """
        if num_drones!=1: print("[ERROR] in BaseSingleAgentAviary.__init__(), BaseSingleAgentAviary only accepts num_drones=1"); exit()
        self.IMG_OBS = img_obs
        self.DYN_IN = dyn_input
        if self.IMG_OBS:
            # Camera capture buffers; capture frequency must divide evenly into
            # the physics aggregation to stay in sync.
            self.IMG_RES = np.array([64, 48]); self.IMG_FRAME_PER_SEC = 24; self.IMG_CAPTURE_FREQ = int(freq/self.IMG_FRAME_PER_SEC)
            self.rgb = np.zeros(((num_drones, self.IMG_RES[1], self.IMG_RES[0], 4))); self.dep = np.ones(((num_drones, self.IMG_RES[1], self.IMG_RES[0]))); self.seg = np.zeros(((num_drones, self.IMG_RES[1], self.IMG_RES[0])))
            if self.IMG_CAPTURE_FREQ%aggregate_phy_steps!=0: print("[ERROR] in BaseSingleAgentAviary.__init__(), aggregate_phy_steps incompatible with the desired video capture frame rate ({:f}Hz)".format(self.IMG_FRAME_PER_SEC)); exit()
        super().__init__(drone_model=drone_model, neighbourhood_radius=neighbourhood_radius,
                         initial_xyzs=initial_xyzs, initial_rpys=initial_rpys, physics=physics, freq=freq,
                         aggregate_phy_steps=aggregate_phy_steps, gui=gui, record=record, obstacles=obstacles, user_debug_gui=user_debug_gui)
        if self.DYN_IN:
            # Mixer matrix A maps squared rotor speeds to [thrust, x/y/z torque];
            # layout depends on the drone's motor configuration (X vs +).
            if self.DRONE_MODEL==DroneModel.CF2X: self.A = np.array([ [1, 1, 1, 1], [.5, .5, -.5, -.5], [-.5, .5, .5, -.5], [-1, 1, -1, 1] ])
            elif self.DRONE_MODEL in [DroneModel.CF2P, DroneModel.HB]: self.A = np.array([ [1, 1, 1, 1], [0, 1, 0, -1], [-1, 0, 1, 0], [-1, 1, -1, 1] ])
            self.INV_A = np.linalg.inv(self.A); self.B_COEFF = np.array([1/self.KF, 1/(self.KF*self.L), 1/(self.KF*self.L), 1/self.KM])

    def _addObstacles(self):
        """Add a few sample obstacles from PyBullet's bundled .urdf files
        (only in image-observation mode, to give the camera something to see)."""
        if self.IMG_OBS:
            p.loadURDF("block.urdf", [1,0,.1], p.getQuaternionFromEuler([0,0,0]), physicsClientId=self.CLIENT)
            p.loadURDF("cube_small.urdf", [0,1,.1], p.getQuaternionFromEuler([0,0,0]), physicsClientId=self.CLIENT)
            p.loadURDF("duck_vhacd.urdf", [-1,0,.1], p.getQuaternionFromEuler([0,0,0]), physicsClientId=self.CLIENT)
            p.loadURDF("teddy_vhacd.urdf", [0,-1,.1], p.getQuaternionFromEuler([0,0,0]), physicsClientId=self.CLIENT)

    def _actionSpace(self):
        """Return the action space of the environment.

        Intended to be a Box(4,) over normalized per-motor inputs, but the
        active return is a reduced Box(1,) left in by the "REMOVE" debug edit.
        """
        #### Action vector ######## P0 P1 P2 P3
        act_lower_bound = np.array([-1, -1, -1, -1])
        act_upper_bound = np.array([1, 1, 1, 1])
        # return spaces.Box( low=act_lower_bound, high=act_upper_bound, dtype=np.float32 )
        ##### REMOVE
        ############
        return spaces.Box( low=np.array([-1]), high=np.array([1]), dtype=np.float32 )

    def _observationSpace(self):
        """Return the observation space.

        Image mode: HxWx4 uint8 Box. State mode is intended to be a Box(20,),
        but the active return is a reduced Box(2,) left in by the "REMOVE"
        debug edit (matching the [z, vz] slice returned by _computeObs()).
        """
        if self.IMG_OBS: return spaces.Box(low=0, high=255, shape=(self.IMG_RES[1], self.IMG_RES[0], 4), dtype=np.uint8)
        else:
            #### Observation vector ### X Y Z Q1 Q2 Q3 Q4 R P Y VX VY VZ WR WP WY P0 P1 P2 P3
            obs_lower_bound = np.array([-1, -1, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1])
            obs_upper_bound = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1])
            # return spaces.Box( low=obs_lower_bound, high=obs_upper_bound, dtype=np.float32 )
            ##### REMOVE
            ############
            return spaces.Box( low=np.array([0,-1]), high=np.array([1,1]), dtype=np.float32 )

    def _computeObs(self):
        """Return the current observation: the latest RGB frame in image mode,
        otherwise a (debug-reduced) slice of the clipped/normalized state."""
        if self.IMG_OBS:
            if self.step_counter%self.IMG_CAPTURE_FREQ==0:
                self.rgb[0], self.dep[0], self.seg[0] = self._getDroneImages(0, segmentation=False)
                # DEBUG ONLY, REMOVE TO IMPROVE RENDERING PERFORMANCE
                #### Printing observation to PNG frames example ####
                if self.GUI:
                    path = os.path.dirname(os.path.abspath(__file__))+"/../../../files/test/"; os.makedirs(os.path.dirname(path), exist_ok=True)
                    self._exportImage(img_type=ImageType.RGB, img_input=self.rgb[0], path=path, frame_num=int(self.step_counter/self.IMG_CAPTURE_FREQ))
            return self.rgb[0]
        else:
            obs = self._clipAndNormalizeState(self._getDroneStateVector(0))
            # return obs
            ##### REMOVE
            ############
            # Debug-reduced observation: indices 2 and 12 of the normalized
            # state vector (z position and z velocity per the layout above).
            return np.hstack([ obs[2], obs[12] ])

    def _preprocessAction(self, action):
        """Translate a normalized action into the 4 motor RPMs.

        dyn_input mode maps the action to thrust/torques and solves for RPMs;
        otherwise the active path is a "REMOVE" debug variant that perturbs
        the hover RPM instead of using the full normalized-RPM conversion.
        """
        if self.DYN_IN:
            return self._nnlsRPM(thrust=self.MAX_THRUST*(action[0]+1)/2, x_torque=self.MAX_XY_TORQUE*action[1], y_torque=self.MAX_XY_TORQUE*action[2], z_torque=self.MAX_Z_TORQUE*action[3])
            ##### REMOVE
            ############
            # return self._nnlsRPM(thrust=self.MAX_THRUST*(action[0]+1)/2, x_torque=0, y_torque=0, z_torque=0) # DEBUG ONLY
        else:
            rpm = self._normalizedActionToRPM(action)
            # return np.clip(np.array(rpm), 0, self.MAX_RPM)
            ##### REMOVE
            ############
            return np.repeat(self.HOVER_RPM+action*self.HOVER_RPM/20, 4) # DEBUG ONLY
            # return np.repeat(self.HOVER_RPM+action*self.HOVER_RPM/20, 1) # DEBUG ONLY

    def _clipAndNormalizeState(self, state):
        """Normalize the 20-value raw simulation state to the [-1, 1] range.

        Abstract hook: concrete single-agent aviaries must implement this.
        """
        pass

    def _nnlsRPM(self, thrust, x_torque, y_torque, z_torque):
        """Compute the 4 motor RPMs realizing the desired thrust and torques.

        Inverts the mixer matrix A; if any squared rotor speed comes out
        negative (infeasible command), falls back to a non-negative least
        squares (NNLS) solution and warns in GUI mode.
        """
        #### Check the feasibility of thrust and torques ####
        if self.GUI and (thrust<0 or thrust>self.MAX_THRUST): print("[WARNING] it", self.step_counter, "in DynCtrlAviary._nnlsRPM(), unfeasible thrust {:.2f} outside range [0, {:.2f}]".format(thrust, self.MAX_THRUST))
        if self.GUI and np.abs(x_torque)>self.MAX_XY_TORQUE: print("[WARNING] it", self.step_counter, "in DynCtrlAviary._nnlsRPM(), unfeasible roll torque {:.2f} outside range [{:.2f}, {:.2f}]".format(x_torque, -self.MAX_XY_TORQUE, self.MAX_XY_TORQUE))
        if self.GUI and np.abs(y_torque)>self.MAX_XY_TORQUE: print("[WARNING] it", self.step_counter, "in DynCtrlAviary._nnlsRPM(), unfeasible pitch torque {:.2f} outside range [{:.2f}, {:.2f}]".format(y_torque, -self.MAX_XY_TORQUE, self.MAX_XY_TORQUE))
        if self.GUI and np.abs(z_torque)>self.MAX_Z_TORQUE: print("[WARNING] it", self.step_counter, "in DynCtrlAviary._nnlsRPM(), unfeasible yaw torque {:.2f} outside range [{:.2f}, {:.2f}]".format(z_torque, -self.MAX_Z_TORQUE, self.MAX_Z_TORQUE))
        B = np.multiply(np.array([thrust, x_torque, y_torque, z_torque]), self.B_COEFF)
        sq_rpm = np.dot(self.INV_A, B)
        #### Use NNLS if any of the desired angular velocities is negative ####
        if np.min(sq_rpm)<0:
            sol, res = nnls(self.A, B, maxiter=3*self.A.shape[1])
            if self.GUI:
                print("[WARNING] it", self.step_counter, "in DynCtrlAviary._nnlsRPM(), unfeasible squared rotor speeds, using NNLS")
                print("Negative sq. rotor speeds:\t [{:.2f}, {:.2f}, {:.2f}, {:.2f}]".format(sq_rpm[0], sq_rpm[1], sq_rpm[2], sq_rpm[3]),
                      "\t\tNormalized: [{:.2f}, {:.2f}, {:.2f}, {:.2f}]".format(sq_rpm[0]/np.linalg.norm(sq_rpm), sq_rpm[1]/np.linalg.norm(sq_rpm), sq_rpm[2]/np.linalg.norm(sq_rpm), sq_rpm[3]/np.linalg.norm(sq_rpm)))
                print("NNLS:\t\t\t\t [{:.2f}, {:.2f}, {:.2f}, {:.2f}]".format(sol[0], sol[1], sol[2], sol[3]),
                      "\t\t\tNormalized: [{:.2f}, {:.2f}, {:.2f}, {:.2f}]".format(sol[0]/np.linalg.norm(sol), sol[1]/np.linalg.norm(sol), sol[2]/np.linalg.norm(sol), sol[3]/np.linalg.norm(sol)),
                      "\t\tResidual: {:.2f}".format(res) )
            sq_rpm = sol
        return np.sqrt(sq_rpm)
| 85.521978 | 253 | 0.414134 | 1,503 | 15,565 | 4.133733 | 0.200266 | 0.018992 | 0.021729 | 0.023821 | 0.377756 | 0.333977 | 0.290842 | 0.226622 | 0.203122 | 0.187993 | 0 | 0.021547 | 0.194925 | 15,565 | 181 | 254 | 85.994475 | 0.474264 | 0.208288 | 0 | 0.126582 | 0 | 0.025316 | 0.130959 | 0.030513 | 0 | 0 | 0 | 0 | 0 | 1 | 0.101266 | false | 0.012658 | 0.088608 | 0 | 0.291139 | 0.113924 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
00040d6243fafd7ad8637217ce77ba3bde5d4dce | 255 | py | Python | BOJ/02000~02999/2800~2899/2858.py | shinkeonkim/today-ps | f3e5e38c5215f19579bb0422f303a9c18c626afa | [
"Apache-2.0"
] | 2 | 2020-01-29T06:54:41.000Z | 2021-11-07T13:23:27.000Z | BOJ/02000~02999/2800~2899/2858.py | shinkeonkim/Today_PS | bb0cda0ee1b9c57e1cfa38355e29d0f1c6167a44 | [
"Apache-2.0"
] | null | null | null | BOJ/02000~02999/2800~2899/2858.py | shinkeonkim/Today_PS | bb0cda0ee1b9c57e1cfa38355e29d0f1c6167a44 | [
"Apache-2.0"
] | null | null | null | from math import sqrt
# R red (border) and B brown (interior) tiles together fill an a x b grid:
# a*b == R+B and (a-2)*(b-2) == B. Print the matching a b (larger first).
R, B = map(int, input().split())
total = R + B
for d in range(1, int(sqrt(total)) + 1):
    if total % d != 0:
        continue
    longer, shorter = total // d, d
    if longer > 2 and shorter > 2 and (longer - 2) * (shorter - 2) == B:
        print(longer, shorter)
print(a,b) | 23.181818 | 44 | 0.415686 | 54 | 255 | 1.962963 | 0.425926 | 0.056604 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.044586 | 0.384314 | 255 | 11 | 45 | 23.181818 | 0.630573 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.090909 | 0 | 0.090909 | 0.090909 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
00043adab868a44eb1376f819afe331317b86da7 | 635 | py | Python | hint_cli/format.py | agarthetiger/hint | db39325b4cd2a9d6678a35c63e188060a77eec1c | [
"MIT"
] | 1 | 2021-02-20T11:27:40.000Z | 2021-02-20T11:27:40.000Z | hint_cli/format.py | agarthetiger/hint | db39325b4cd2a9d6678a35c63e188060a77eec1c | [
"MIT"
] | 4 | 2020-09-14T08:45:31.000Z | 2021-09-28T05:26:33.000Z | hint_cli/format.py | agarthetiger/hint | db39325b4cd2a9d6678a35c63e188060a77eec1c | [
"MIT"
] | null | null | null | import re
import click
import rich
# Matches inline code spans like `command`; the span body is captured as "command".
RE_COMMAND = re.compile(r"`(?P<command>.*?)`")

# See available colours listed under click.Style on
# https://click.palletsprojects.com/en/7.x/api/#utilities
TITLE_COLOUR = "cyan"
COMMAND_COLOUR = "blue"
def style_command(match):
    """Return the regex-captured command text rendered bold in the command colour."""
    command_text = match.group('command')
    return click.style(command_text, bold=True, fg=COMMAND_COLOUR)
def format_for_stdout(line):
    """Convert one line of markdown-ish hint text into click-styled terminal output."""
    if line.startswith('#'):
        # Heading: drop the leading hashes, render bold in the title colour.
        heading = line.split(maxsplit=1)[-1]
        return click.style(heading, bold=True, fg=TITLE_COLOUR)
    if line.startswith('*'):
        # Bullet item: drop the marker, style inline `commands`, indent by one space.
        body = RE_COMMAND.sub(style_command, line.split(maxsplit=1)[-1])
        return ' ' + body
    # Anything else passes through unchanged.
    return line
| 24.423077 | 82 | 0.688189 | 90 | 635 | 4.744444 | 0.488889 | 0.070258 | 0.074941 | 0.084309 | 0.088993 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009363 | 0.159055 | 635 | 25 | 83 | 25.4 | 0.790262 | 0.16378 | 0 | 0 | 0 | 0 | 0.070209 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0.2 | 0.066667 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
00052cf4e2e58f6cc352baf20ca7627cbaf2e48d | 2,850 | py | Python | dynamic_dispatch/__init__.py | XevoInc/dynamic_dispatch | fb4ec6fcb392c866637533180635650b0fe127b1 | [
"Apache-2.0"
] | 5 | 2020-06-12T22:23:04.000Z | 2020-06-14T19:37:52.000Z | dynamic_dispatch/__init__.py | XevoInc/dynamic_dispatch | fb4ec6fcb392c866637533180635650b0fe127b1 | [
"Apache-2.0"
] | null | null | null | dynamic_dispatch/__init__.py | XevoInc/dynamic_dispatch | fb4ec6fcb392c866637533180635650b0fe127b1 | [
"Apache-2.0"
] | null | null | null | """ Like functools.singledispatch, but dynamic, value-based dispatch. """
__all__ = ('dynamic_dispatch',)
import functools
import inspect
from typing import Union, Callable, Type, Hashable
from dynamic_dispatch._class import class_dispatch
from dynamic_dispatch._func import func_dispatch
from ._typeguard import typechecked
@typechecked(always=True)
def dynamic_dispatch(func: Union[Callable, Type, None] = None, *, default: bool = False):
    """
    Value-based dynamic-dispatch decorator for classes and functions.

    Gives the decorated class or function different implementations depending
    on the value of its first parameter. With ``default=True`` the decorated
    object itself serves as the fallback implementation. Additional
    implementations are registered through the ``dispatch`` attribute of the
    returned object; if an implementation declares a parameter with the same
    name as func's first parameter, that value is passed along.

    :Example:

    >>> @dynamic_dispatch(default=True)
    >>> def foo(bar: int):
    >>>     print(bar)
    >>>
    >>> @foo.dispatch(on=5)
    >>> def _(bar: int, baz: int):
    >>>     print(bar * baz)
    >>>
    >>> @foo.dispatch(on=10)
    >>> def _():
    >>>     print(-10)
    >>>
    >>> foo(1)
    1
    >>> foo(5, 10)
    50
    >>> foo(10)
    -10

    :param func: class or function to add dynamic dispatch to.
    :param default: whether or not to use func as the default implementation.
    :returns: func with dynamic dispatch
    """
    if func is None:
        # Invoked as @dynamic_dispatch(default=...); return a decorator that
        # waits for the target.
        return functools.partial(dynamic_dispatch, default=default)

    if inspect.isclass(func):
        # Classes are handled by their own dispatch machinery.
        return class_dispatch(func, default)

    dispatcher = func_dispatch(func, default=default)

    # Wrap the generated dispatch hook so callers never see the implicit
    # `arguments` parameter -- it is derived from the implementation's signature.
    raw_dispatch = dispatcher.dispatch

    def dispatch_shim(impl: Callable = None, *, on: Hashable):
        if impl is None:
            return functools.partial(dispatch_shim, on=on)
        return raw_dispatch(impl, arguments=inspect.signature(impl).parameters, on=on)

    # Type checker complains if we assign directly.
    setattr(dispatcher, 'dispatch', dispatch_shim)
    return dispatcher
| 29.081633 | 89 | 0.603158 | 334 | 2,850 | 5.01497 | 0.344311 | 0.089552 | 0.035821 | 0.020299 | 0.093134 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011794 | 0.285965 | 2,850 | 97 | 90 | 29.381443 | 0.811302 | 0.597895 | 0 | 0 | 0 | 0 | 0.025723 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.095238 | false | 0 | 0.285714 | 0 | 0.619048 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
000928fd102f8c6f8af00e4185ffc3aba1531060 | 1,776 | py | Python | steam/ext/tf2/protobufs/struct_messages.py | olifog/steam-ext-tf2 | 15d1c72d4b690ccda1309ae35558f5a3655badad | [
"MIT"
] | null | null | null | steam/ext/tf2/protobufs/struct_messages.py | olifog/steam-ext-tf2 | 15d1c72d4b690ccda1309ae35558f5a3655badad | [
"MIT"
] | null | null | null | steam/ext/tf2/protobufs/struct_messages.py | olifog/steam-ext-tf2 | 15d1c72d4b690ccda1309ae35558f5a3655badad | [
"MIT"
] | null | null | null | from __future__ import annotations
from typing_extensions import Self
from ....protobufs.struct_messages import StructMessage
from ....utils import StructIO
# some custom messages to make things a lot easier decoding/encoding wise
class CraftRequest(StructMessage):
recipe: int
items: list[int]
def __bytes__(self) -> bytes:
with StructIO() as io:
io.write_struct("<hh", self.recipe, len(self.items))
for item in self.items:
io.write_u64(item)
return io.buffer
class CraftResponse(StructMessage):
recipe_id: int
id_list: tuple[int, ...]
being_used: bool
def parse(self, data: bytes) -> CraftResponse:
with StructIO(data) as io:
self.recipe_id = io.read_i16()
_ = io.read_u32() # always 0 in mckay's experience
id_count = io.read_i16()
self.id_list = io.read_struct(f"<{id_count}Q")
self.being_used = False
return self
class SetItemStyleRequest(StructMessage):
item_id: int
style: int
def __bytes__(self) -> bytes:
with StructIO() as io:
io.write_u64(self.item_id)
io.write_u32(self.style)
return io.buffer
class DeleteItemRequest(StructMessage):
item_id: int
class WrapItemRequest(StructMessage):
wrapping_paper_id: int
item_id: int
class UnwrapItemRequest(StructMessage):
gift_id: int
class DeliverGiftRequest(StructMessage):
user_id64: int
gift_id: int
class OpenCrateRequest(StructMessage):
key_id: int
crate_id: int
class CacheSubscribedCheck(StructMessage):
def parse(self, data: bytes) -> Self:
return self # IDK how to decode this but I don't want to have to special case this
| 23.064935 | 91 | 0.658784 | 225 | 1,776 | 5.013333 | 0.404444 | 0.039894 | 0.044326 | 0.026596 | 0.113475 | 0.076241 | 0.076241 | 0.076241 | 0.076241 | 0.076241 | 0 | 0.011364 | 0.256757 | 1,776 | 76 | 92 | 23.368421 | 0.843182 | 0.096284 | 0 | 0.265306 | 0 | 0 | 0.009369 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.081633 | false | 0 | 0.081633 | 0.020408 | 0.734694 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0009ee17c19df3dc7afee3d18b6b34fc302f1aaa | 6,697 | py | Python | cms/publications/models.py | dragon-dxw/nhs-ei.website | 6b513040f2cbf5c4359dc0f9431712d74bc6aa02 | [
"MIT"
] | null | null | null | cms/publications/models.py | dragon-dxw/nhs-ei.website | 6b513040f2cbf5c4359dc0f9431712d74bc6aa02 | [
"MIT"
] | 35 | 2021-06-25T10:22:48.000Z | 2022-03-30T11:26:22.000Z | cms/publications/models.py | dxw/nhs-ei.website | 6b513040f2cbf5c4359dc0f9431712d74bc6aa02 | [
"MIT"
] | null | null | null | from urllib.parse import urlparse
from cms.categories.models import Category, PublicationType, CategoryPage
from cms.publications.blocks import PublicationsBlocks
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.db import models
from django.db.models.fields.related import ForeignKey
from modelcluster.fields import ParentalKey
from wagtail.admin.edit_handlers import (
FieldPanel,
InlinePanel,
MultiFieldPanel,
StreamFieldPanel,
)
from wagtail.core.fields import RichTextField, StreamField
from wagtail.core.models import Page
class PublicationIndexPage(Page):
# title already in the Page class
# slug already in the Page class
subpage_types = ["publications.Publication"]
body = RichTextField(blank=True)
content_panels = Page.content_panels + [
FieldPanel("body"),
]
class Meta:
verbose_name = "Publications Index Page"
verbose_name_plural = "Publications Index Pages"
def get_latest_publications(num):
return Publication.objects.all().order_by("-latest_revision_created_at")[:num]
def get_context(self, request, *args, **kwargs):
"""
Publications can have one or more categories (topics) or publications (publication_type).
At the moment, you can only choose one or the other. I think that's best to avoid lots of empty
result sets but we will need a decision made on that. TODO.
"""
context = super().get_context(request, *args, **kwargs)
publication_ordering = request.GET.get("order") or "-latest_revision_created_at"
if request.GET.get("publication_type"):
context["publication_type_id"] = int(request.GET.get("publication_type"))
publications = (
Publication.objects.live()
.order_by(publication_ordering)
.filter(
publication_publication_type_relationship__publication_type=request.GET.get(
"publication_type"
)
)
)
# NOTE: filtering by category was commented out but I want see if it works -- Dragon.
elif request.GET.get("category"):
context["category_id"] = int(request.GET.get("category"))
publications = (
Publication.objects.live()
.order_by(publication_ordering)
.filter(
categorypage_category_relationship__category=request.GET.get(
"category"
)
)
)
# NOTE: end block for previous note -- Dragon.
else:
publications = Publication.objects.live().order_by(publication_ordering)
paginator = Paginator(publications, 16)
try:
items = paginator.page(request.GET.get("page"))
except PageNotAnInteger:
items = paginator.page(1)
except EmptyPage:
items = paginator.page(paginator.num_pages)
context["publications"] = items
context["publication_types"] = PublicationType.objects.all()
# categories isn't exposed on the webpage at all, and contains a lot of empty categories.
context["categories"] = Category.objects.all()
context["order"] = publication_ordering
return context
def get_wp_api_link(self):
# TODO: Pretty sure this is a debug feature that should be removed.
return f"https://www.england.nhs.uk/wp-json/wp/v2/documents/{self.wp_id}"
def get_wp_live_link(self):
# TODO: Pretty sure this is a debug feature that should be removed.
self_url_path = self.url
live_url_path = urlparse(self.wp_link).path
live_url = "https://www.england.nhs.uk{}".format(live_url_path)
print(self_url_path)
print(live_url_path)
return live_url
class PublicationPublicationTypeRelationship(models.Model):
publication = ParentalKey(
"publications.Publication",
related_name="publication_publication_type_relationship",
)
publication_type = ForeignKey(
"categories.PublicationType",
related_name="+",
on_delete=models.CASCADE,
)
class Publication(CategoryPage):
parent_page_types = ["publications.PublicationIndexPage"]
"""
title already in the Page class
slug already in the Page class
going to need to parse the html here to extract the text
"""
# going to need to parse the html here to extract the text
body = RichTextField(blank=True)
documents = StreamField(PublicationsBlocks, blank=True)
""" coming across form wordpress need to keep for now"""
wp_id = models.PositiveIntegerField(null=True, blank=True)
source = models.CharField(null=True, max_length=100, blank=True)
wp_slug = models.TextField(null=True, blank=True)
wp_link = models.TextField(null=True, blank=True)
component_fields = models.TextField(null=True, blank=True)
"""i think we can do away with this field
and use the text from body to create the exceprt"""
# excerpt = RichTextField(blank=True)
author = models.CharField(max_length=255, blank=True)
content_panels = Page.content_panels + [
InlinePanel(
"publication_publication_type_relationship", label="Publication Types"
),
InlinePanel(
"categorypage_category_relationship", label="Publication Categories"
),
FieldPanel("body"),
StreamFieldPanel("documents"),
MultiFieldPanel(
[
FieldPanel("wp_id"),
FieldPanel("author"),
FieldPanel("source"),
FieldPanel("wp_slug"),
FieldPanel("wp_link"),
FieldPanel("component_fields"),
],
heading="wordpress data we dont need in the end",
classname="collapsed collapsible",
),
]
def get_wp_api_link(self):
wp_source = self.source.replace("pages-", "")
wp_id = self.wp_id
if wp_source != "pages":
api_url = "https://www.england.nhs.uk/{}/wp-json/wp/v2/documents/{}".format(
wp_source, wp_id
)
else:
api_url = "https://www.england.nhs.uk/wp-json/wp/v2/documents/{}".format(
wp_id
)
return api_url
def get_wp_live_link(self):
self_url_path = self.url
live_url_path = urlparse(self.wp_link).path
live_url = "https://www.england.nhs.uk{}".format(live_url_path)
print(self_url_path)
print(live_url_path)
return live_url
| 36.2 | 103 | 0.637599 | 753 | 6,697 | 5.511288 | 0.280212 | 0.021687 | 0.02506 | 0.021687 | 0.332048 | 0.306265 | 0.242651 | 0.223855 | 0.209398 | 0.17759 | 0 | 0.002453 | 0.269524 | 6,697 | 184 | 104 | 36.396739 | 0.845871 | 0.11214 | 0 | 0.274074 | 0 | 0.007407 | 0.157265 | 0.049447 | 0 | 0 | 0 | 0.01087 | 0 | 1 | 0.044444 | false | 0 | 0.074074 | 0.014815 | 0.303704 | 0.02963 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
001d960b36b9299709cf24d57e522c49b191b84c | 623 | py | Python | funilaria/migrations/0009_auto_20191010_1655.py | ph0980/OPE-IndyCar | c574c892c2d1bf633b2aa229ed13e0940184cff9 | [
"Apache-2.0"
] | 2 | 2019-08-21T11:37:01.000Z | 2019-10-01T13:53:22.000Z | funilaria/migrations/0009_auto_20191010_1655.py | ph0980/OPE-IndyCar | c574c892c2d1bf633b2aa229ed13e0940184cff9 | [
"Apache-2.0"
] | 6 | 2020-06-05T23:06:08.000Z | 2022-02-10T11:06:34.000Z | funilaria/migrations/0009_auto_20191010_1655.py | ph0980/OPE-IndyCar | c574c892c2d1bf633b2aa229ed13e0940184cff9 | [
"Apache-2.0"
] | 1 | 2019-12-21T22:53:49.000Z | 2019-12-21T22:53:49.000Z | # Generated by Django 2.2 on 2019-10-10 19:55
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('funilaria', '0008_auto_20191009_0904'),
]
operations = [
migrations.AddField(
model_name='orcamento',
name='finalizado',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='ordemdeservico',
name='prazo_entrega',
field=models.DateField(default=datetime.datetime(2019, 10, 10, 16, 55, 5, 539559)),
),
]
| 24.92 | 95 | 0.605136 | 63 | 623 | 5.888889 | 0.650794 | 0.032345 | 0.043127 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.10962 | 0.282504 | 623 | 24 | 96 | 25.958333 | 0.720358 | 0.069021 | 0 | 0.111111 | 1 | 0 | 0.134948 | 0.039792 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.111111 | 0 | 0.277778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0022d3ced162a6cf917e5e7f2a034582d63841ce | 4,163 | py | Python | admin/handler/userHandler.py | xin1195/smart | 11815b8a63f2459300e8aaad82b539cfef8a7546 | [
"Apache-2.0"
] | 1 | 2016-05-09T12:29:47.000Z | 2016-05-09T12:29:47.000Z | admin/handler/userHandler.py | xin1195/smartSearch | 11815b8a63f2459300e8aaad82b539cfef8a7546 | [
"Apache-2.0"
] | null | null | null | admin/handler/userHandler.py | xin1195/smartSearch | 11815b8a63f2459300e8aaad82b539cfef8a7546 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# _*_coding:utf-8_*_
import hashlib
import traceback
import tornado.web
from tornado import gen
from admin.handler.baseHandler import BaseHandler
from common.authLib import auth_permissions
from setting import logger
class AdminUserHandler(BaseHandler):
@tornado.web.authenticated
@auth_permissions
@gen.coroutine
def get(self, *args, **kwargs):
res_msg = ""
users = []
num = int(self.get_argument("num", 15))
page = int(self.get_argument("page", 1))
total_count = 0
try:
query = {}
show = {"_id": 0}
cursor = self.db.sys_user.find(query, show).skip((page - 1) * num).limit(num)
while (yield cursor.fetch_next):
user = cursor.next_object()
users.append(user)
total_count = yield self.db.sys_user.find().count()
except:
logger.error(traceback.format_exc())
self.render("admin/sys_user_list.html", users=users, res_msg=res_msg, total_count=total_count, page=page, num=num)
class AdminUserAddHandler(BaseHandler):
@tornado.web.authenticated
@auth_permissions
@gen.coroutine
def get(self, *args, **kwargs):
res_msg = ""
user = {}
self.render("admin/sys_user_add.html", res_msg=res_msg, form_action="/admin/user/add", user=user)
@auth_permissions
@gen.coroutine
def post(self, *args, **kwargs):
username = self.get_argument("username", "")
password = self.get_argument("password", "")
email = self.get_argument("email", "")
tell_phone = self.get_argument("tell_phone", "")
try:
salt = hashlib.md5(username.encode('utf-8')).hexdigest()
hash_password = hashlib.sha256((password + salt).encode('utf-8')).hexdigest()
user_dict = {
"username": username,
"password": hash_password,
"email": email,
"tell_phone": tell_phone,
}
query = {"username": username}
yield self.db.sys_user.update(query, user_dict, upsert=True)
except:
logger.error(traceback.format_exc())
self.redirect("/admin/user")
class AdminUserUpdateHandler(BaseHandler):
@tornado.web.authenticated
@auth_permissions
@gen.coroutine
def get(self, *args, **kwargs):
res_msg = ""
user = {}
try:
username = self.get_argument("username", "")
query = {"username": username}
show = {"_id": 0}
user = yield self.db.sys_user.find_one(query, show)
except:
logger.error(traceback.format_exc())
self.render("admin/sys_user_add.html", user=user, res_msg=res_msg, form_action="/admin/user/update")
@auth_permissions
@gen.coroutine
def post(self, *args, **kwargs):
username = self.get_argument("username", "")
password = self.get_argument("password", "")
email = self.get_argument("email", "")
tell_phone = self.get_argument("tell_phone", "")
try:
user_dict = {
"username": username,
"email": email,
"tell_phone": tell_phone,
}
if password:
salt = hashlib.md5(username.encode('utf-8')).hexdigest()
hash_password = hashlib.sha256((password + salt).encode('utf-8')).hexdigest()
user_dict["password"] = hash_password
query = {"username": username}
yield self.db.sys_user.update(query, {"$set": user_dict}, upsert=True)
except:
logger.error(traceback.format_exc())
self.redirect("/admin/user")
class AdminUserDeleteHandler(BaseHandler):
@tornado.web.authenticated
@auth_permissions
@gen.coroutine
def get(self, *args, **kwargs):
try:
username = self.get_argument("username", "")
query = {"username": username}
self.db.sys_user.remove(query)
except:
logger.error(traceback.format_exc())
self.redirect("/admin/user")
| 34.404959 | 122 | 0.585876 | 455 | 4,163 | 5.191209 | 0.208791 | 0.035563 | 0.076207 | 0.068586 | 0.700677 | 0.69348 | 0.651143 | 0.635902 | 0.609653 | 0.563082 | 0 | 0.007019 | 0.281288 | 4,163 | 120 | 123 | 34.691667 | 0.78242 | 0.009608 | 0 | 0.688679 | 0 | 0 | 0.083718 | 0.016986 | 0 | 0 | 0 | 0 | 0 | 1 | 0.056604 | false | 0.066038 | 0.066038 | 0 | 0.160377 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
0027d54f621e5da6f3af054395a4bc3184233f40 | 7,923 | py | Python | trainers/expname_trainer.py | Hhhhhhhhhhao/image-cartoonization | 073b51656b96b069496917d212119caad7bf4728 | [
"MIT"
] | null | null | null | trainers/expname_trainer.py | Hhhhhhhhhhao/image-cartoonization | 073b51656b96b069496917d212119caad7bf4728 | [
"MIT"
] | null | null | null | trainers/expname_trainer.py | Hhhhhhhhhhao/image-cartoonization | 073b51656b96b069496917d212119caad7bf4728 | [
"MIT"
] | null | null | null | import torch
from torchvision.utils import make_grid
import numpy as np
from base import BaseTrainer
from models import Generator, Discriminator
from losses import *
from data_loaders import CartoonDataLoader
from utils import MetricTracker
class ExpnameTrainer(BaseTrainer):
def __init__(self, config):
super(ExpnameTrainer, self).__init__(config)
self.logger.info("Creating data loaders...")
self.train_dataloader, self.valid_dataloader = self._build_dataloader()
self.log_step = int(np.sqrt(self.train_dataloader.batch_size))
self.logger.info("Creating model architecture...")
gen, disc = self._build_model()
# resume
if self.config.resume is not None:
self._resume_checkpoint(config.resume)
# move to device
self.gen = gen.to(self.device)
self.disc = disc.to(self.device)
if len(self.device_ids) > 1:
self.gen = torch.nn.DataParallel(self.gen, device_ids=self.device_ids)
self.disc = torch.nn.DataParallel(self.disc, device_ids=self.device_ids)
self.logger.info("Creating optimizers...")
self.gen_optim, self.disc_optim = self._build_optimizer(self.gen, self.disc)
# build loss
self.logger.info("Creating losses...")
self._build_criterion()
self.logger.info("Creating metric trackers...")
self._build_metrics()
def _build_dataloader(self):
train_dataloader = CartoonDataLoader(
data_dir=self.config.data_dir,
src_style='real',
tar_style=self.config.tar_style,
batch_size=self.config.batch_size,
image_size=self.config.image_size,
num_workers=self.config.num_workers)
valid_dataloader = train_dataloader.split_validation()
return train_dataloader, valid_dataloader
def _build_model(self):
""" build generator and discriminator model """
gen = Generator(self.config.image_size, self.config.down_size, self.config.num_res, self.config.skip_conn)
disc = Discriminator(self.config.image_size, self.config.down_size)
return gen, disc
def _build_optimizer(self, gen, disc):
""" build generator and discriminator optimizers """
gen_optim = torch.optim.AdamW(
gen.parameters(),
lr=self.config.g_lr,
weight_decay=self.config.weight_decay,
betas=(0.5, 0.999))
disc_optim = torch.optim.AdamW(
disc.parameters(),
lr=self.config.d_lr,
weight_decay=self.config.weight_decay,
betas=(0.5, 0.999))
return gen_optim, disc_optim
def _build_criterion(self):
self.adv_criterion = eval('{}Loss'.format(self.config.adv_criterion))()
# TODO add extra criterion you need here
def _build_metrics(self):
# TODO: add the loss you want to log here
self.metric_names = ['disc', 'gen']
self.train_metrics = MetricTracker(*[metric for metric in self.metric_names], writer=self.writer)
self.valid_metrics = MetricTracker(*[metric for metric in self.metric_names], writer=self.writer)
def _train_epoch(self, epoch):
"""
Training logic for an epoch
:param epoch: Integer, current training epoch.
:return: A log that contains average loss and metric in this epoch.
"""
self.gen.train()
self.disc.train()
self.train_metrics.reset()
for batch_idx, (src_imgs, tar_imgs) in enumerate(self.train_dataloader):
src_imgs, tar_imgs = src_imgs.to(self.device), tar_imgs.to(self.device)
self.gen_optim.zero_grad()
self.disc_optim.zero_grad()
raise NotImplementedError
# ============ Generation ============ #
# ============ train D ============ #
# ============ train G ============ #
# ============ log ============ #
self.writer.set_step((epoch - 1) * len(self.train_dataloader) + batch_idx)
# TODO: add the loss you want to log here
if batch_idx % self.log_step == 0:
self.logger.info('Train Epoch: {:d} {:d} Disc. Loss: {:.4f} Gen. Loss {:.4f}'.format(
epoch,
self._progress(batch_idx),
disc_loss.item(),
gen_loss.item()))
log = self.train_metrics.result()
val_log = self._valid_epoch(epoch)
log.update(**{'val_'+k : v for k, v in val_log.items()})
# shuffle data loader
self.train_dataloader.shuffle()
return log
def _valid_epoch(self, epoch):
"""
Validate after training an epoch
:param epoch: Integer, current training epoch.
:return: A log that contains information about validation
"""
self.gen.eval()
self.disc.eval()
disc_losses = []
gen_losses = []
self.valid_metrics.reset()
with torch.no_grad():
for batch_idx, (src_imgs, tar_imgs) in enumerate(self.valid_dataloader):
src_imgs, tar_imgs = src_imgs.to(self.device), tar_imgs.to(self.device)
# TODO similar to train but not optimizer.step()
raise NotImplementedError
# ============ Generation ============ #
# ============ D Loss ============ #
# ============ G Loss ============ #
# log losses
self.writer.set_step(epoch)
self.valid_metrics.update('disc', np.mean(disc_losses))
self.valid_metrics.update('gen', np.mean(gen_losses))
# log images
src_tar_imgs = torch.cat([src_imgs.cpu(), fake_tar_imgs.cpu()], dim=-1)
self.writer.add_image('src2tar', make_grid(src_tar_imgs.cpu(), nrow=1, normalize=True))
return self.valid_metrics.result()
def _save_checkpoint(self, epoch):
"""
Saving checkpoints
:param epoch: current epoch number
:param log: logging information of the epoch
:param save_best: if True, rename the saved checkpoint to 'model_best.pth'
"""
state = {
'epoch': epoch,
'gen_state_dict': self.gen.state_dict() if len(self.device_ids) <= 1 else self.gen.module.state_dict(),
'disc_state_dict': self.disc.state_dict() if len(self.device_ids) <= 1 else self.disc.module.state_dict(),
'gen_optim': self.gen_optim.state_dict(),
'disc_optim': self.disc_optim.state_dict()
}
filename = str(self.config.checkpoint_dir + 'current.pth')
torch.save(state, filename)
self.logger.info("Saving checkpoint: {} ...".format(filename))
if epoch % self.save_period == 0:
filename = str(self.config.checkpoint_dir + 'epoch{}.pth'.format(epoch))
torch.save(state, filename)
self.logger.info("Saving checkpoint: {} ...".format(filename))
def _resume_checkpoint(self, resume_path):
"""
Resume from saved checkpoints
:param resume_path: Checkpoint path to be resumed
"""
resume_path = str(resume_path)
self.logger.info("Loading checkpoint: {} ...".format(resume_path))
checkpoint = torch.load(resume_path)
self.start_epoch = checkpoint['epoch'] + 1
# load architecture params from checkpoint.
self.gen.load_state_dict(checkpoint['gen_state_dict'])
self.disc.load_state_dict(checkpoint['disc_state_dict'])
# load optimizer state from checkpoint only when optimizer type is not changed.
self.gen_optim.load_state_dict(checkpoint['gen_optim'])
self.disc_optim.load_state_dict(checkpoint['disc_optim'])
self.logger.info("Checkpoint loaded. Resume training from epoch {}".format(self.start_epoch))
| 38.461165 | 118 | 0.607724 | 949 | 7,923 | 4.87039 | 0.198103 | 0.043271 | 0.03029 | 0.023799 | 0.266768 | 0.224794 | 0.194721 | 0.194721 | 0.178711 | 0.165729 | 0 | 0.004123 | 0.265304 | 7,923 | 205 | 119 | 38.64878 | 0.789899 | 0.155244 | 0 | 0.097561 | 0 | 0.00813 | 0.071703 | 0 | 0 | 0 | 0 | 0.014634 | 0 | 1 | 0.081301 | false | 0 | 0.065041 | 0 | 0.195122 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0035cda9c6da37575e7853b710767c9b07b845e5 | 870 | py | Python | python-scripts/contours/Generalize-orienteering-contours.py | JiriVales/orienteering-tools | 3bac9e75c2116be35d3a59ffa203e8e736a2b66c | [
"BSD-3-Clause"
] | 5 | 2018-05-31T07:41:51.000Z | 2020-04-16T09:20:34.000Z | python-scripts/contours/Generalize-orienteering-contours.py | JiriVales/automatic-creation-orienteering-map | 3bac9e75c2116be35d3a59ffa203e8e736a2b66c | [
"BSD-3-Clause"
] | null | null | null | python-scripts/contours/Generalize-orienteering-contours.py | JiriVales/automatic-creation-orienteering-map | 3bac9e75c2116be35d3a59ffa203e8e736a2b66c | [
"BSD-3-Clause"
] | null | null | null | ##Generalize orienteering contours=name
##maximumdistancebetweentheoriginalandthesimplifiedcurvedouglaspeuckeralgorithm=number4
##contours=vector
##min=string37
##generalizecontours=output vector
outputs_QGISFIELDCALCULATOR_1=processing.runalg('qgis:fieldcalculator', contours,'length',0,10.0,2.0,True,'round($length,2)',None)
outputs_QGISEXTRACTBYATTRIBUTE_1=processing.runalg('qgis:extractbyattribute', outputs_QGISFIELDCALCULATOR_1['OUTPUT_LAYER'],'length',3,min,None)
outputs_GRASS7V.GENERALIZE.SIMPLIFY_1=processing.runalg('grass7:v.generalize.simplify', outputs_QGISEXTRACTBYATTRIBUTE_1['OUTPUT'],0,maximumdistancebetweentheoriginalandthesimplifiedcurvedouglaspeuckeralgorithm,7.0,50.0,False,True,None,-1.0,0.0001,0,None)
outputs_QGISDELETECOLUMN_1=processing.runalg('qgis:deletecolumn', outputs_GRASS7V.GENERALIZE.SIMPLIFY_1['output'],'length',generalizecontours) | 96.666667 | 255 | 0.858621 | 93 | 870 | 7.870968 | 0.419355 | 0.060109 | 0.092896 | 0.086066 | 0.090164 | 0 | 0 | 0 | 0 | 0 | 0 | 0.040936 | 0.017241 | 870 | 9 | 256 | 96.666667 | 0.815205 | 0.208046 | 0 | 0 | 0 | 0 | 0.214706 | 0.075 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0039d60278d356a8f3ab1e0dec5ab95cb6e993cf | 382 | py | Python | tester.py | Edrig/CircuitPython_TB6612FNG | 3f89725ff437dd46e84a93b4b3525d61de46a740 | [
"MIT"
] | null | null | null | tester.py | Edrig/CircuitPython_TB6612FNG | 3f89725ff437dd46e84a93b4b3525d61de46a740 | [
"MIT"
] | null | null | null | tester.py | Edrig/CircuitPython_TB6612FNG | 3f89725ff437dd46e84a93b4b3525d61de46a740 | [
"MIT"
] | null | null | null |
from time import sleep as delay
import fng
m1 = fng.Fng(board.ENA, board.IN1, board.IN2)
m1 = fng.Fng(board.ENB, board.IN3, board.IN4)
m1.write(0, 'cw')
m2.write(0, 'cw')
delay(1)
m1.write(300, 'cw')
delay(2)
m2.write(300, 'cw')
delay(2)
m1.write(0, 'cw')
m2.write(0, 'cw')
delay(1)
m1.write(300, 'ccw')
delay(2)
m2.write(300, 'ccw')
delay(2)
m1.write(0, 'cw')
m2.write(0, 'cw') | 15.916667 | 45 | 0.641361 | 78 | 382 | 3.141026 | 0.294872 | 0.146939 | 0.195918 | 0.122449 | 0.595918 | 0.42449 | 0.42449 | 0.42449 | 0.42449 | 0.42449 | 0 | 0.119403 | 0.123037 | 382 | 24 | 46 | 15.916667 | 0.61194 | 0 | 0 | 0.6 | 0 | 0 | 0.057592 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
003d3854efd76cbac152e06a2062c7b5a20382d7 | 677 | py | Python | app/__init__.py | Abdulrahmannaser/QRC_generator | dd1d50ac4ba0bbe3ce12475ad49a3c7f459f136d | [
"MIT"
] | null | null | null | app/__init__.py | Abdulrahmannaser/QRC_generator | dd1d50ac4ba0bbe3ce12475ad49a3c7f459f136d | [
"MIT"
] | null | null | null | app/__init__.py | Abdulrahmannaser/QRC_generator | dd1d50ac4ba0bbe3ce12475ad49a3c7f459f136d | [
"MIT"
] | null | null | null | import logging
import os
from flask import Flask, request, current_app
from flask_sqlalchemy import SQLAlchemy
from config import Config
db = SQLAlchemy()
def create_app(config_class=Config):
app = Flask(__name__)
app.config.from_object(config_class)
db.init_app(app)
from app.errors import bp as errors_bp
app.register_blueprint(errors_bp)
from app.main import bp as main_bp
app.register_blueprint(main_bp)
from app.api import bp as api_bp
app.register_blueprint(api_bp, url_prefix='/api')
app.logger.setLevel(logging.INFO)
app.logger.info('QR Generator startup')
return app
from app import models
| 13.27451 | 53 | 0.726736 | 100 | 677 | 4.71 | 0.34 | 0.059448 | 0.063694 | 0.140127 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.20384 | 677 | 50 | 54 | 13.54 | 0.87384 | 0 | 0 | 0 | 0 | 0 | 0.03653 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.05 | false | 0 | 0.45 | 0 | 0.55 | 0.15 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
0041a01733c5a5a6382b7693a41c05e29aca872c | 28,622 | py | Python | src/act/atlas/aCTPanda2Xrsl.py | ATLASControlTower/aCT | fb841bddbe086db9f0d620167c4a11ae4634ef4f | [
"Apache-2.0"
] | null | null | null | src/act/atlas/aCTPanda2Xrsl.py | ATLASControlTower/aCT | fb841bddbe086db9f0d620167c4a11ae4634ef4f | [
"Apache-2.0"
] | null | null | null | src/act/atlas/aCTPanda2Xrsl.py | ATLASControlTower/aCT | fb841bddbe086db9f0d620167c4a11ae4634ef4f | [
"Apache-2.0"
] | 4 | 2018-02-05T11:25:20.000Z | 2018-07-19T09:53:13.000Z | import cgi
import json
import os
import re
import time
import uuid
class aCTPanda2Xrsl:
def __init__(self, pandadbjob, siteinfo, osmap, tmpdir, atlasconf, log):
self.log = log
self.pandajob = pandadbjob['pandajob']
self.jobdesc = cgi.parse_qs(self.pandajob)
self.pandajobid = pandadbjob['id']
self.pandaid = self.jobdesc['PandaID'][0]
self.xrsl = {}
self.siteinfo = siteinfo
self.ncores = siteinfo['corecount']
self.prodSourceLabel = self.jobdesc['prodSourceLabel'][0]
self.resourcetype = self.jobdesc.get('resourceType', ['None'])[0]
self.defaults = {}
self.defaults['memory'] = 2000
self.defaults['cputime'] = 2*1440*60
self.memory = self.defaults['memory']
self.sitename = pandadbjob['siteName']
self.schedconfig = siteinfo['schedconfig']
self.truepilot = siteinfo['truepilot']
self.cricjsons = siteinfo.get('cricjsons', 0)
self.osmap = osmap
self.maxwalltime = siteinfo['maxwalltime']
if self.maxwalltime == 0:
self.maxwalltime = 7*24*60
self.created = pandadbjob['created']
self.wrapper = atlasconf.get(["executable", "wrapperurl"])
if self.prodSourceLabel.startswith('rc_'):
self.wrapper = atlasconf.get(["executable", "wrapperurlrc"])
self.pilotversion = siteinfo.get('pilot_version', '2')
self.piloturl = siteinfo.get('params', {}).get('pilot_url')
if self.prodSourceLabel.startswith('rc_test'):
self.piloturl = atlasconf.get(["executable", "ptarurlrc"])
if self.pilotversion == '3':
self.piloturl = atlasconf.get(["executable", "p3tarurlrc"])
if self.prodSourceLabel.startswith('ptest'):
self.piloturl = atlasconf.get(["executable", "ptarurldev"])
if self.pilotversion == '3':
self.piloturl = atlasconf.get(["executable", "p3tarurldev"])
if not self.truepilot and not self.piloturl:
self.piloturl = atlasconf.get(["executable", "ptarurl"])
if self.pilotversion == '3':
self.piloturl = atlasconf.get(["executable", "p3tarurl"])
self.tmpdir = tmpdir
self.inputfiledir = os.path.join(self.tmpdir, 'inputfiles')
self.inputjobdir = os.path.join(self.inputfiledir, self.jobdesc['PandaID'][0])
self.atlasconf = atlasconf
self.eventranges = pandadbjob['eventranges']
self.traces = []
try:
self.schedulerid = json.loads(pandadbjob['metadata'].decode())['schedulerid']
except:
self.schedulerid = atlasconf.get(["panda", "schedulerid"])
self.rtesites = ["BEIJING-CS-TH-1A_MCORE","BEIJING-ERAII_MCORE","BEIJING-TIANJIN-TH-1A_MCORE","LRZ-LMU_MUC1_MCORE"]#,"LRZ-LMU_MUC_MCORE1"]#"MPPMU-DRACO_MCORE","MPPMU-HYDRA_MCORE"]
self.atlasrelease = None
self.monitorurl = atlasconf.get(["monitor", "apfmon"])
# ES merge jobs need unique guids because pilot uses them as dict keys
if not self.truepilot and 'eventServiceMerge' in self.jobdesc and self.jobdesc['eventServiceMerge'][0] == 'True':
if self.pandajob.startswith('GUID'):
esjobdesc = self.pandajob[self.pandajob.find('&'):]
else:
esjobdesc = self.pandajob[:self.pandajob.find('&GUID')] + self.pandajob[self.pandajob.find('&', self.pandajob.find('&GUID')+5):]
esjobdesc += '&GUID=%s' % '%2C'.join(['DUMMYGUID%i' % i for i in range(len(self.jobdesc['GUID'][0].split(',')))])
self.pandajob = esjobdesc
#print self.jobdesc.keys()
def getNCores(self):
    """Determine the number of cores for the job.

    Unified panda queues: always use coreCount from the job description.
    Sets the xrsl 'count' attribute (and 'countpernode' for multi-core
    jobs, forcing single-node execution) and caches the value in
    self.ncores.

    Returns:
        int: number of cores, at least 1.
    """
    try:
        self.ncores = int(self.jobdesc.get('coreCount', [1])[0])
    except (ValueError, TypeError, IndexError):  # corecount is NULL/malformed
        self.ncores = 1
    self.xrsl['count'] = '(count=%d)' % self.ncores
    # force single-node jobs for now
    if self.ncores > 1:
        self.xrsl['countpernode'] = '(countpernode=%d)' % self.ncores
    return self.ncores
def setJobname(self):
    """Set the xrsl jobname attribute from the job description.

    Falls back to the generic name "pandajob" when the job description
    carries no jobName field.
    """
    name = self.jobdesc.get('jobName', ['pandajob'])[0]
    self.xrsl['jobname'] = '(jobname = "%s")' % name
def setDisk(self):
    """Set the xrsl disk attribute (selected sites only).

    Only UIO_CLOUD and DE-TARDIS request an explicit disk limit. The
    requested space is the job's maxDiskCount (default 500) plus a
    2000 unit safety margin. Input file sizes are deliberately not
    added since input files are read from the cache.
    """
    if self.sitename not in ('UIO_CLOUD', 'DE-TARDIS'):
        return
    # Space for data created by the job, plus safety factor
    requested = (int(self.jobdesc['maxDiskCount'][0])
                 if 'maxDiskCount' in self.jobdesc else 500)
    disk = requested + 2000
    self.log.debug('%s: disk space %d' % (self.pandaid, disk))
    self.xrsl['disk'] = "(disk = %d)" % disk
def setTime(self):
    """Set the xrsl walltime and cputime attributes.

    Starts from the job's maxCpuCount (seconds), falling back to the
    configured default, converts to walltime minutes, then applies a
    chain of label- and site-specific scalings and caps. The order of
    the adjustments below is significant.
    """
    if 'maxCpuCount' in self.jobdesc:
        cpucount = int(self.jobdesc['maxCpuCount'][0])
        # hack for group production!!!
        if cpucount == 600:
            cpucount = 24*3600
        self.log.info('%s: job maxCpuCount %d' % (self.pandaid, cpucount))
    else:
        cpucount = self.defaults['cputime']
        self.log.info('%s: Using default maxCpuCount %d' % (self.pandaid, cpucount))
    if cpucount <= 0:
        cpucount = self.defaults['cputime']
    # seconds -> minutes
    walltime = cpucount // 60
    # Jedi underestimates walltime increase by 50% for now
    walltime = walltime * 1.5
    # for NDGF, analysis recaling
    if self.sitename in [ 'ANALY_ARNES_DIRECT', 'ARNES', 'DCSC', 'HPC2N', 'LUNARC', 'NSC', 'UIO', 'UIO_CLOUD', 'UNIBE-LHEP', 'UNIBE-LHEP-UBELIX', 'UNIBE-LHEP-UBELIX_MCORE_LOPRI', 'UNIGE-BAOBAB'] and self.prodSourceLabel in ['user']:
        walltime = walltime * 1.5
    # for large core count
    if self.getNCores() > 15:
        walltime = walltime * 2
    # JEDI analysis hack: never request less than an hour
    walltime = max(60, walltime)
    walltime = min(self.maxwalltime, walltime)
    # For truepilot use queue maxwalltime
    # SFU wants job walltime
    if self.truepilot and 'CA-SFU-T2' not in self.sitename and 'WATERLOO' not in self.sitename:
        walltime = self.maxwalltime
    if self.sitename.startswith('Vega'):
        walltime = max (360, walltime)
    # cputime is the total over all cores
    cputime = self.getNCores() * walltime
    if self.sitename.startswith('BOINC'):
        if self.sitename == 'BOINC_LONG':
            walltime = min(1200, walltime)
        else:
            walltime = min(240, walltime)
        # BOINC: cputime equals walltime (single-core accounting)
        cputime = walltime
    self.log.info('%s: walltime: %d, cputime: %d, maxtime: %d' % (self.pandaid, walltime, cputime, self.maxwalltime))
    self.xrsl['time'] = '(walltime=%d)(cputime=%d)' % (walltime, cputime)
def setMemory(self):
    """Set the xrsl memory attribute (per-core memory).

    Starts from the job's minRamCount (or a label-dependent default),
    rounds up to 500 MB units, converts to per-core for multi-core
    jobs and applies several site-specific overrides. The final value
    is also stored in self.memory for getResourceType().
    """
    if 'minRamCount' in self.jobdesc and int(self.jobdesc['minRamCount'][0]) > 0:
        memory = int(self.jobdesc['minRamCount'][0])
    elif not self.prodSourceLabel in ('user', 'panda'):
        # production default
        memory = 4000
    else:
        # analysis default
        memory = 2000
    if memory <= 0:
        memory = self.defaults['memory']
    # fix until maxrrs in pandajob is better known
    if memory <= 1000:
        memory = 1000
    if self.sitename == 'BOINC_MCORE':
        memory = 2400
    # hack mcore pile, use new convention for memory
    # fix memory to 500MB units (AF fix before divide)
    memory = (memory-1)//500*500 + 500
    if self.getNCores() > 1:
        # hack for 0 ramcount, defaulting to 4000, see above, fix to 2000/core
        if memory == 4000:
            memory = 2000
        else:
            memory = memory // self.getNCores()
        # min 0.5G/core
        if memory <= 250:
            memory = 250
    else:
        # Min 2GB for single core
        memory = max(memory, 2000)
    if self.sitename in ['Vega', 'Vega_largemem']:
        # NOTE(review): true division yields a float here; presumably a
        # deliberate halving for Vega - confirm intended semantics
        memory = memory / 2
    if self.sitename == 'MPPMU_MCORE' and memory < 2000:
        memory = 2000
    self.xrsl['memory'] = '(memory = %d)' % (memory)
    self.memory = memory
def setRTE(self):
    """Set the xrsl runtimeenvironment attribute(s).

    Modern sites just get ATLAS-SITE (plus ENV/PROXY for analysis /
    true-pilot jobs). Sites listed in self.rtesites get old-style
    per-release RTEs derived from swRelease/homepackage, which also
    sets self.atlasrelease for setEnvironment().
    """
    # Non-RTE setup only requires ATLAS-SITE and possibly ENV/PROXY
    if self.truepilot:
        #self.xrsl['rtes'] = "(runtimeenvironment = ENV/PROXY)(runtimeenvironment = APPS/HEP/ATLAS-SITE-LCG)"
        self.xrsl['rtes'] = '(runtimeenvironment="ENV/PROXY")'
        return
    if self.prodSourceLabel in ('user', 'panda') and 'BOINC' not in self.sitename:
        self.xrsl['rtes'] = '(runtimeenvironment="ENV/PROXY")(runtimeenvironment="APPS/HEP/ATLAS-SITE")'
        return
    if self.sitename not in self.rtesites:
        self.xrsl['rtes'] = '(runtimeenvironment="APPS/HEP/ATLAS-SITE")'
        return
    # Old-style RTE setup: derive one RTE name per (release, cache) pair
    atlasrtes = []
    for (package, cache) in zip(self.jobdesc['swRelease'][0].split('\n'), self.jobdesc['homepackage'][0].split('\n')):
        if cache.find('Production') > 1 and cache.find('AnalysisTransforms') < 0:
            rte = package.split('-')[0].upper() + '-' + cache.split('/')[1]
        elif cache.find('AnalysisTransforms') != -1:
            rte = package.upper()
            res = re.match('AnalysisTransforms-(.+)_(.+)', cache)
            if res is not None:
                if res.group(1).find('AtlasProduction') != -1:
                    rte = "ATLAS-" + res.group(2)
                else:
                    rte = "ATLAS-" + res.group(1).upper() + "-" + res.group(2)
        else:
            rte = cache.replace('Atlas', 'Atlas-').replace('/', '-').upper()
        rte = str(rte)
        rte = rte.replace('ATLAS-', '')
        # append the platform tag, e.g. X86_64-SLC6-GCC48-OPT
        rte += "-"+self.jobdesc['cmtConfig'][0].upper()
        if cache.find('AnalysisTransforms') < 0:
            # map project prefixes onto their RTE names
            rte = rte.replace('PHYSICS-', 'ATLASPHYSICS-')
            rte = rte.replace('PROD2-', 'ATLASPROD2-')
            rte = rte.replace('PROD1-', 'ATLASPROD1-')
            rte = rte.replace('DERIVATION-', 'ATLASDERIVATION-')
            rte = rte.replace('P1HLT-', 'ATLASP1HLT-')
            rte = rte.replace('TESTHLT-', 'ATLASTESTHLT-')
            rte = rte.replace('CAFHLT-', 'ATLASCAFHLT-')
            rte = rte.replace('21.0.13.1','ATLASPRODUCTION-21.0.13.1')
            rte = rte.replace('21.0.20.1','ATLASPRODUCTION-21.0.20.1')
        if cache.find('AnalysisTransforms') != -1:
            res=re.match(r'(21\..+)',rte)
            if res is not None:
                rte = rte.replace('21','OFFLINE-21')
        if rte.find('NULL') != -1:
            # no usable release info: fall back to a plain python RTE
            rte = 'PYTHON-CVMFS-X86_64-SLC6-GCC47-OPT'
        atlasrtes.append(rte)
    self.xrsl['rtes'] = ""
    # only the last RTE is requested from the queue
    for rte in atlasrtes[-1:]:
        self.xrsl['rtes'] += "(runtimeenvironment = APPS/HEP/ATLAS-" + rte + ")"
    if self.prodSourceLabel in ('user', 'panda'):
        self.xrsl['rtes'] += "(runtimeenvironment = ENV/PROXY)"
    self.atlasrelease = ",".join(atlasrtes)
def setExecutable(self):
    """Set the xrsl executable attribute to the pilot wrapper script."""
    self.xrsl['executable'] = '(executable = runpilot2-wrapper.sh)'
def getJobType(self):
    """Return the pilot job type: 'user' for analysis labels, else 'managed'."""
    if self.prodSourceLabel in ('user', 'panda'):
        return 'user'
    return 'managed'
def getResourceType(self):
    """Return the panda resource type (SCORE/MCORE, optionally _HIMEM).

    A resource type already stored on the job (anything other than the
    literal string 'None') takes precedence over the derived one.
    """
    if self.resourcetype != 'None':
        return self.resourcetype
    base = 'MCORE' if self.ncores > 1 else 'SCORE'
    suffix = '_HIMEM' if self.memory > self.defaults['memory'] else ''
    return base + suffix
def setArguments(self):
    """Set the xrsl arguments attribute: the pilot wrapper command line.

    Builds the runpilot2-wrapper.sh argument string from queue, site,
    label, job/resource type and pilot version, then appends
    label-specific flags, the python version and either true-pilot
    server options or aCT local-pilot options. Also applies a per-site
    job-parameter hack for IN2P3-CC_HPC_IDRIS_MCORE.
    """
    #pargs = '"-q" "%s" "-r" "%s" "-s" "%s" "-d" "-j" "%s" "--pilot-user" "ATLAS" "-w" "generic" "--job-type" "%s" "--resource-type" "%s"' \
    pargs = '"-q" "%s" "-r" "%s" "-s" "%s" "-j" "%s" "--pilot-user" "ATLAS" "-w" "generic" "--job-type" "%s" "--resource-type" "%s" "--pilotversion" "%s"' \
        % (self.schedconfig, self.sitename, self.sitename, self.prodSourceLabel, self.getJobType(), self.getResourceType(), self.pilotversion)
    if self.prodSourceLabel == 'rc_alrb':
        pargs += ' "-i" "ALRB"'
    elif self.prodSourceLabel.startswith('rc_test'):
        # RC test jobs additionally run the pilot in debug mode
        pargs += ' "-d" "-i" "RC"'
    if self.siteinfo['python_version'].startswith('3'):
        pargs += ' "--pythonversion" "3"'
    if self.truepilot:
        # true pilot talks to the panda server directly
        pargs += ' "--url" "https://pandaserver.cern.ch" "-p" "25443"'
        if self.piloturl:
            pargs += ' "--piloturl" "%s"' % (self.piloturl)
    else:
        # aCT-managed pilot: use the locally staged pilot tarball
        pargs += ' "-z" "-t" "--piloturl" "local" "--mute"'
    self.xrsl['arguments'] = '(arguments = %s)' % pargs
    # Panda job hacks for specific sites
    # Commented on request of Rod
    #if self.sitename in ['LRZ-LMU_MUC_MCORE1', 'LRZ-LMU_MUC1_MCORE']:
    if self.sitename in ['IN2P3-CC_HPC_IDRIS_MCORE']:
        self.pandajob = re.sub(r'--DBRelease%3D%22all%3Acurrent%22', '--DBRelease%3D%22100.0.2%22', self.pandajob)
def setInputsES(self, inf):
    """Fill *inf* with LFNs for event-service input files.

    Files whose prodDBlockToken is the string 'None' are fetched from
    rucio; otherwise the token's leading integer selects an object
    store from self.osmap, falling back to a placeholder s3 URL when
    the store id is unknown.
    """
    infiles = self.jobdesc['inFiles'][0].split(",")
    scopes = self.jobdesc['scopeIn'][0].split(",")
    tokens = self.jobdesc['prodDBlockToken'][0].split(",")
    for fname, scope, token in zip(infiles, scopes, tokens):
        if token == 'None':
            # Rucio file
            inf[fname] = '/'.join(["rucio://rucio-lb-prod.cern.ch;rucioaccount=pilot;cache=invariant/replicas", scope, fname])
            continue
        bucket = int(token.split("/")[0])
        if bucket in self.osmap:
            inf[fname] = '/'.join([self.osmap[bucket], fname])
        else:
            # TODO this exception is ignored by panda2arc
            #raise Exception("No OS defined in AGIS for bucket id %d" % i)
            inf[fname] = 's3://unknown/%s' % fname
def setInputs(self):
    """Set the xrsl inputfiles attribute.

    Combines three sources of inputs:
      - push mode: the raw panda job description is written to a local
        file and staged in (plus the wrapper for true-pilot jobs);
      - pilot machinery: wrapper script, pilot tarball, queuedata and,
        for special HPCs, the cric json files;
      - the job's own input files as rucio (or object store) URLs,
        registering a rucio trace record for each file.
    """
    x = ""
    if self.siteinfo['push']:
        # create input file with job desc
        pandaid = self.jobdesc['PandaID'][0]
        try:
            os.makedirs(self.inputjobdir)
        except:
            # directory may already exist
            pass
        tmpfile = self.inputjobdir+"/pandaJobData.out"
        with open(tmpfile, "w") as f:
            f.write(self.pandajob)
        x += '(pandaJobData.out "%s/pandaJobData.out")' % self.inputjobdir
        if self.truepilot:
            x += '(runpilot2-wrapper.sh "%s")' % self.wrapper
        self.xrsl['inputfiles'] = "(inputfiles = %s )" % x
        return
    # Wrapper
    if self.eventranges: # TO FIX
        x += '(runpilot2-wrapper.sh "http://aipanda404.cern.ch;cache=check/data/releases/runpilot3-wrapper-es.sh")'
    else:
        x += '(runpilot2-wrapper.sh "%s")' % self.wrapper
    # Pilot tarball
    x += '(pilot2.tar.gz "%s" "cache=check")' % self.piloturl
    # Special HPCs which cannot get cric files from cvmfs or over network
    if self.cricjsons:
        x += '(cric_ddmendpoints.json "/cvmfs/atlas.cern.ch/repo/sw/local/etc/cric_ddmendpoints.json")'
        x += '(cric_pandaqueues.json "/cvmfs/atlas.cern.ch/repo/sw/local/etc/cric_pandaqueues.json")'
    # Panda queue configuration
    if self.eventranges:
        x += '(ARCpilot-test.tar.gz "http://aipanda404.cern.ch;cache=check/data/releases/ARCpilot-es.tar.gz")'
    x += '(queuedata.json "http://pandaserver.cern.ch:25085;cache=check/cache/schedconfig/%s.all.json")' % self.schedconfig
    # Input files
    if 'inFiles' in self.jobdesc:
        inf = {}
        if 'eventServiceMerge' in self.jobdesc and self.jobdesc['eventServiceMerge'][0] == 'True':
            self.setInputsES(inf)
        for filename, scope, dsn, guid, token, ddmin in zip(self.jobdesc['inFiles'][0].split(","),
                                                            self.jobdesc['scopeIn'][0].split(","),
                                                            self.jobdesc['realDatasetsIn'][0].split(","),
                                                            self.jobdesc['GUID'][0].split(","),
                                                            self.jobdesc['prodDBlockToken'][0].split(","),
                                                            [None]*len(self.jobdesc['inFiles'][0].split(",")) if not self.jobdesc.get('ddmEndPointIn') else self.jobdesc['ddmEndPointIn'][0].split(",")):
            # Skip files which use direct I/O: site has it enabled, token is
            # not 'local', file is root file and --useLocalIO is not used
            # don't use direct I/O - pending new mover switch
            #if token != 'local' and self.siteinfo.get('direct_access_lan', False) and \
            #  not ('.tar.gz' in filename or '.lib.tgz' in filename or '.raw.' in filename) and \
            #  '--useLocalIO' not in self.jobdesc['jobPars'][0]:
            #    continue
            # Hard-coded pilot rucio account - should change based on proxy
            # Rucio does not expose mtime, set cache=invariant so not to download too much
            #if self.sitename in ["SiGNET"]:
            if self.sitename in ['ANALY_ARNES_DIRECT', 'ANALY_BOINC', 'ANALY_SiGNET_DIRECT', 'ARC-TEST', 'ARNES', 'DCSC', 'HPC2N', 'LUNARC', 'NSC', 'RIVR.UM', 'SiGNET', 'SiGNET-NSC_MCORE', 'UIO', 'UIO_CLOUD', 'UIO_CLOUD_LOPRI', 'UNIBE-LHEP', 'UNIBE-LHEP-UBELIX', 'UNIBE-LHEP-UBELIX_MCORE_LOPRI', 'UNIGE-BAOBAB']:
                lfn = '/'.join(["rucio://rucio-lb-prod.cern.ch;rucioaccount=pilot;transferprotocol=https;httpgetpartial=no;cache=invariant/replicas", scope, filename])
            else:
                lfn = '/'.join(["rucio://rucio-lb-prod.cern.ch;rucioaccount=pilot;cache=invariant/replicas", scope, filename])
            inf[filename] = lfn
            # register a rucio trace for this download
            dn = self.jobdesc.get('prodUserID', [])
            eventType = 'get_sm'
            if re.match('user', self.prodSourceLabel) or re.match('panda', self.prodSourceLabel):
                eventType = 'get_sm_a'
            self.traces.append({'uuid': str(uuid.uuid4()),
                                'scope': scope,
                                'filename': filename,
                                'dataset': dsn,
                                'guid': guid,
                                'eventVersion': 'aCT',
                                'timeStart': time.time(),
                                'usrdn': dn[0],
                                'localSite': ddmin,
                                'remoteSite': ddmin,
                                'eventType': eventType})
        # some files are double:
        for k, v in inf.items():
            x += f'("{k}" "{v}")'
    if 'eventService' in self.jobdesc and self.jobdesc['eventService'] and self.eventranges:
        # Create tmp json file to upload with job
        pandaid = self.jobdesc['PandaID'][0]
        tmpjsonfile = os.path.join(self.tmpdir, 'eventranges', str('%s.json' % pandaid))
        jsondata = json.loads(self.eventranges)
        with open(tmpjsonfile, 'w') as f:
            json.dump(jsondata, f)
        x += '("eventranges.json" "%s")' % tmpjsonfile
    self.xrsl['inputfiles'] = "(inputfiles = %s )" % x
def setLog(self):
    """Set the xrsl stdout attribute (stderr joined) from the job log name."""
    logfile = self.jobdesc.get('logFile', ['LOGFILE'])[0]
    stem = logfile.replace('.tgz', '')
    self.xrsl['log'] = '(stdout = "%s")(join = yes)' % stem
def setGMLog(self):
    """Set the grid-manager log directory and the automatic rerun count."""
    self.xrsl['gmlog'] = '("gmlog" = "gmlog")'
    self.xrsl['rerun'] = '("rerun" = "2")'
def setOutputs(self):
    """Set the xrsl outputfiles attribute.

    True-pilot jobs upload their outputs themselves, so nothing is
    declared. Otherwise the final heartbeat json and the dynamic
    output list are fetched back with the job.
    """
    if self.truepilot:
        self.xrsl['outputs'] = ""
        return
    # json with final heartbeat, plus dynamic outputs
    declared = ''.join(['("heartbeat.json" "")', '("@output.list" "")'])
    self.xrsl['outputs'] = "(outputfiles = %s )" % declared
def setPriority(self):
    """Set the xrsl priority attribute.

    Maps the panda currentPriority (roughly 0..10000+) onto the ARC
    0..100 priority range. Sites that do not want a priority at all
    (wuppertal queues) get an empty attribute, and BOINC_MCORE is
    pinned to 28. Nothing is set when the job carries no
    currentPriority.
    """
    if 'currentPriority' not in self.jobdesc:
        return
    #self.xrsl['priority'] = '("priority" = ' + str(int(self.jobdesc['currentPriority'][0])/100) + ')'
    # need a better number
    prio = 50
    try:
        prio = int(self.jobdesc['currentPriority'][0])
        if prio < 1:
            prio = 1
        if prio > 0 and prio < 1001:
            prio = prio * 90 // 1000
        if prio > 1000 and prio < 10001:
            prio = 90 + (prio - 1000) // 900
        if prio > 10000:
            prio = 100
    except (ValueError, TypeError, IndexError):
        # malformed priority: keep the default of 50
        pass
    #self.xrsl['priority'] = '("priority" = 60 )'
    self.xrsl['priority'] = '(priority = %d )' % prio
    # Per-site overrides
    if self.sitename in ('wuppertalprod_MCORE', 'wuppertalprod',
                         'wuppertalprod_HI', 'ANALY_wuppertalprod'):
        self.xrsl['priority'] = ""
    if self.sitename == 'BOINC_MCORE':
        self.xrsl['priority'] = '(priority = 28)'
def setEnvironment(self):
    """Set the xrsl environment attribute.

    Exposes the scheduler id and the job log URL (GTAG) to the job,
    the ATLAS release for old-style RTE sites and, for true-pilot
    jobs with monitoring enabled, the APFMon variables.
    """
    # Set schedulerID and job log URL
    environment = {}
    environment['PANDA_JSID'] = self.schedulerid
    schedurl = self.atlasconf.get(["joblog", "urlprefix"])
    environment['GTAG'] = '%s/%s/%s/%s.out' % (schedurl, self.created.strftime('%Y-%m-%d'), self.sitename, self.pandaid)
    # ATLAS_RELEASE for RTE sites
    if self.atlasrelease:
        environment['ATLAS_RELEASE'] = self.atlasrelease
    # Vars for APFMon (truepilot only)
    if self.truepilot and self.monitorurl:
        environment['APFCID'] = self.pandajobid
        # harvester prepends "harvester-" to the schedulerid but APFMon uses the original one
        environment['APFFID'] = self.schedulerid.replace("harvester-","")
        environment['APFMON'] = self.monitorurl
        environment['FACTORYQUEUE'] = self.sitename
    # NOTE(review): original indentation was lost; PILOT_NOKILL is taken to
    # apply to all jobs (it is not an APFMon variable) - confirm against
    # upstream aCT
    environment['PILOT_NOKILL'] = 'YES'
    self.xrsl['environment'] = '(environment = %s)' % ''.join(['("%s" "%s")' % (k,v) for (k,v) in environment.items()])
def parse(self):
    """Build the full xrsl description by running every setter in order.

    setTime() runs first; it calls getNCores(), which fixes the core
    count used by the later setters.
    """
    self.setTime()
    self.setJobname()
    self.setDisk()
    self.setMemory()
    self.setRTE()
    self.setExecutable()
    self.setArguments()
    self.setInputs()
    self.setLog()
    self.setGMLog()
    self.setOutputs()
    self.setPriority()
    self.setEnvironment()
def getXrsl(self):
    """Return the complete xrsl job description string."""
    body = '\n'.join(self.xrsl.values())
    return '&' + body
if __name__ == '__main__':
    # Standalone smoke test: build and print the xrsl for one captured
    # analysis job.
    from act.common.aCTLogger import aCTLogger
    from act.common.aCTConfig import aCTConfigAPP
    from datetime import datetime
    logger = aCTLogger('test')
    log = logger()
    # URL-encoded panda job description captured from a real analysis job.
    # NOTE(review): fixed mojibake '¤tPriority' -> '&currentPriority'
    # (HTML-entity decoding artefact: '&curren' had been rendered as '¤');
    # setPriority() expects a currentPriority parameter to be present.
    pandajob = "jobsetID=799&logGUID=5ba37307-e4d7-4224-82f9-ff0503622677&cmtConfig=x86_64-slc6-gcc48-opt&prodDBlocks=user.rwatari%3Auser.rwatari.1k_10mu.xm005_yp106.RDO.20161003_2_EXT0_RDO2RDOFTK_v01_all1E5ev_EXT2.99328897%2Cpanda.1110091801.467362.lib._9845189&dispatchDBlockTokenForOut=NULL%2CNULL%2CNULL&destinationDBlockToken=NULL%2CNULL%2CNULL&destinationSE=NULL&realDatasets=user.rwatari.1k_10mu.xm005_yp106.RDO.20161003_2_EXT0_PseduoTracking_v14_all1E5ev_EXT0%2F%2Cuser.rwatari.1k_10mu.xm005_yp106.RDO.20161003_2_EXT0_PseduoTracking_v14_all1E5ev_EXT1%2F%2Cuser.rwatari.1k_10mu.xm005_yp106.RDO.20161003_2_EXT0_PseduoTracking_v14_all1E5ev.log%2F&prodUserID=%2FDC%3Dch%2FDC%3Dcern%2FOU%3DOrganic+Units%2FOU%3DUsers%2FCN%3Drwatari%2FCN%3D764796%2FCN%3DRyutaro+Watari%2FCN%3Dproxy&GUID=51997D0A-850A-9044-A264-83A8986FE1C6%2C1de48e07-f37c-43e6-a343-3947342858b1&realDatasetsIn=user.rwatari.1k_10mu.xm005_yp106.RDO.20161003_2_EXT0_RDO2RDOFTK_v01_all1E5ev_EXT2%2Cpanda.1110091801.467362.lib._9845189&nSent=0&cloud=ND&StatusCode=0&homepackage=AnalysisTransforms-AtlasProduction_20.7.3.7&inFiles=user.rwatari.9557718.EXT2._000016.RDO_FTK.pool.root%2Cpanda.1110091801.467362.lib._9845189.7456421499.lib.tgz&processingType=panda-client-0.5.69-jedi-athena-trf&currentPriority=814&fsize=1140292964%2C727003478&fileDestinationSE=ANALY_SiGNET_DIRECT%2CANALY_SiGNET_DIRECT%2CANALY_SiGNET_DIRECT&scopeOut=user.rwatari%2Cuser.rwatari&minRamCount=4772&jobDefinitionID=836&scopeLog=user.rwatari&transformation=http%3A%2F%2Fpandaserver.cern.ch%3A25085%2Ftrf%2Fuser%2FrunAthena-00-00-12&maxDiskCount=3167&coreCount=1&prodDBlockToken=NULL%2CNULL&transferType=NULL&destinationDblock=user.rwatari.1k_10mu.xm005_yp106.RDO.20161003_2_EXT0_PseduoTracking_v14_all1E5ev_EXT0.104826316_sub0341667607%2Cuser.rwatari.1k_10mu.xm005_yp106.RDO.20161003_2_EXT0_PseduoTracking_v14_all1E5ev_EXT1.104826317_sub0341667608%2Cuser.rwatari.1k_10mu.xm005_yp106.RDO.20161003_2_EXT0_PseduoTracking_v14_all1E5ev.log.104826315_sub0341667610&dispatchDBlockToken=NULL%2CNULL&jobPars=-l+panda.1110091801.467362.lib._9845189.7456421499.lib.tgz+--sourceURL+https%3A%2F%2Faipanda078.cern.ch%3A25443+-r+WorkArea%2Frun%2Ffast%2F+--trf+--useLocalIO++-i+%22%5B%27user.rwatari.9557718.EXT2._000016.RDO_FTK.pool.root%27%5D%22+-o+%22%7B%27IROOT%27%3A+%5B%28%27InDetDxAOD.pool.root%27%2C+%27user.rwatari.9845189.EXT0._002324.InDetDxAOD.pool.root%27%29%2C+%28%27esd.pool.root%27%2C+%27user.rwatari.9845189.EXT1._002324.esd.pool.root%27%29%5D%7D%22++-j+%22Reco_tf.py%2520--inputRDOFile%253Duser.rwatari.9557718.EXT2._000016.RDO_FTK.pool.root%2520--outputESDFile%253Desd.pool.root%2520%2520--doAllNoise%2520False%2520--autoConfiguration%253Deverything%2520--numberOfCavernBkg%253D0%2520--postInclude%253DFTKFastSim%2FInDetDxAOD.py%2520--preExec%2520%2527rec.UserAlgs%253D%255B%2522FTKFastSim%2FFTKFastSimulation_jobOptions.py%2522%255D%253Brec.doCalo.set_Value_and_Lock%2528False%2529%253Brec.doMuon.set_Value_and_Lock%2528False%2529%253Brec.doJetMissingETTag.set_Value_and_Lock%2528False%2529%253Brec.doEgamma.set_Value_and_Lock%2528False%2529%253Brec.doMuonCombined.set_Value_and_Lock%2528False%2529%253Brec.doTau.set_Value_and_Lock%2528False%2529%253Brec.doTrigger.set_Value_and_Lock%2528False%2529%253Brec.doFTK.set_Value_and_Lock%2528True%2529%253Bfrom%2520AthenaCommon.DetFlags%2520import%2520DetFlags%253BDetFlags.all_setOn%2528%2529%253BDetFlags.FTK_setOn%2528%2529%2527%2520--maxEvents%253D-1%2520--postExec%2520r2e%253A%2520%2527ServiceMgr%252B%253DService%2528%2522BeamCondSvc%2522%2529%253BbeamCondSvc%253DServiceMgr.BeamCondSvc%253BbeamCondSvc.useDB%253DFalse%253BbeamCondSvc.posX%253D-0.0497705%253BbeamCondSvc.posY%253D1.06299%253BbeamCondSvc.posZ%253D0.0%253BbeamCondSvc.sigmaX%253D0.0251281%253BbeamCondSvc.sigmaY%253D0.0231978%253BbeamCondSvc.sigmaZ%253D0.1%253BbeamCondSvc.sigmaXY%253D-2.7745e-06%253BbeamCondSvc.tiltX%253D-1.51489e-05%253BbeamCondSvc.tiltY%253D-4.83891e-05%253B%2527%22&attemptNr=2&swRelease=Atlas-20.7.3&nucleus=NULL&maxCpuCount=0&outFiles=user.rwatari.9845189.EXT0._002324.InDetDxAOD.pool.root%2Cuser.rwatari.9845189.EXT1._002324.esd.pool.root%2Cuser.rwatari.1k_10mu.xm005_yp106.RDO.20161003_2_EXT0_PseduoTracking_v14_all1E5ev.log.9845189.002324.log.tgz&ddmEndPointOut=NDGF-T1_SCRATCHDISK%2CNDGF-T1_SCRATCHDISK%2CNDGF-T1_SCRATCHDISK&scopeIn=user.rwatari%2Cpanda&PandaID=3072596651&sourceSite=NULL&dispatchDblock=NULL%2Cpanda.1110091801.467362.lib._9845189&prodSourceLabel=user&checksum=ad%3Afd1c3aac%2Cad%3A516b31b3&jobName=user.rwatari.1k_10mu.xm005_yp106.RDO.20161003_2_EXT0_PseduoTracking_v14_all1E5ev%2F.3071213044&ddmEndPointIn=NDGF-T1_SCRATCHDISK%2CNDGF-T1_SCRATCHDISK&taskID=9845189&logFile=user.rwatari.1k_10mu.xm005_yp106.RDO.20161003_2_EXT0_PseduoTracking_v14_all1E5ev.log.9845189.002324.log.tgz"
    siteinfo = {'schedconfig': 'ANALY_SiGNET_DIRECT', 'corecount': 1, 'truepilot': False, 'maxwalltime': 10800, 'direct_access_lan': True, 'type': 'analysis'}
    conf = aCTConfigAPP()
    pandadbjob = {'pandajob': pandajob, 'siteName': 'ANALY_SiGNET_DIRECT', 'eventranges': None, 'metadata': {}, 'created': datetime.utcnow()}
    a = aCTPanda2Xrsl(pandadbjob, siteinfo, {}, '/tmp', conf, log)
    a.parse()
    print(a.getXrsl())
| 51.202147 | 4,787 | 0.595067 | 3,220 | 28,622 | 5.219565 | 0.245963 | 0.03207 | 0.014994 | 0.011781 | 0.289641 | 0.207235 | 0.178021 | 0.122151 | 0.110371 | 0.10091 | 0 | 0.077719 | 0.260499 | 28,622 | 558 | 4,788 | 51.293907 | 0.716338 | 0.093599 | 0 | 0.160804 | 0 | 0.025126 | 0.363963 | 0.224313 | 0 | 0 | 0 | 0.001792 | 0 | 1 | 0.050251 | false | 0.005025 | 0.025126 | 0.005025 | 0.103015 | 0.002513 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
00475608cd6682a6971f83c144a409f31b8c8578 | 2,653 | py | Python | files/targets/ios_framework/config/config.py | uilianries/ezored | 8c45a6753ddffce1fe406e6e062ff2b455dbb5a1 | [
"MIT"
] | null | null | null | files/targets/ios_framework/config/config.py | uilianries/ezored | 8c45a6753ddffce1fe406e6e062ff2b455dbb5a1 | [
"MIT"
] | null | null | null | files/targets/ios_framework/config/config.py | uilianries/ezored | 8c45a6753ddffce1fe406e6e062ff2b455dbb5a1 | [
"MIT"
] | null | null | null | def run(params={}):
return {
'project_name': 'Core',
'bitcode': True,
'min_version': '9.0',
'enable_arc': True,
'enable_visibility': True,
'conan_profile': 'ezored_ios_framework_profile',
'archs': [
{'arch': 'armv7', 'conan_arch': 'armv7', 'platform': 'OS'},
{'arch': 'armv7s', 'conan_arch': 'armv7s', 'platform': 'OS'},
{'arch': 'arm64', 'conan_arch': 'armv8', 'platform': 'OS64'},
{'arch': 'arm64e', 'conan_arch': 'armv8.3', 'platform': 'OS64'},
{'arch': 'x86_64', 'conan_arch': 'x86_64', 'platform': 'SIMULATOR64'},
],
'build_types': ['Debug', 'Release'],
'install_headers': [
{
'type': 'dir',
'path': 'files/djinni/001-app-domain/generated-src/objc',
},
{
'type': 'dir',
'path': 'files/djinni/002-app-core/generated-src/objc',
},
{
'type': 'dir',
'path': 'files/djinni/003-app-data-services/generated-src/objc',
},
{
'type': 'dir',
'path': 'files/djinni/004-app-system-service/generated-src/objc',
},
{
'type': 'dir',
'path': 'files/djinni/005-app-helpers/generated-src/objc',
},
{
'type': 'dir',
'path': 'files/djinni/datetime/generated-src/objc',
},
{
'type': 'dir',
'path': 'files/djinni/file-helper/generated-src/objc',
},
{
'type': 'dir',
'path': 'files/djinni/httpclient/generated-src/objc',
},
{
'type': 'dir',
'path': 'files/djinni/shared-data/generated-src/objc',
},
{
'type': 'dir',
'path': 'files/djinni/logger/generated-src/objc',
},
{
'type': 'dir',
'path': 'files/djinni/string-helper/generated-src/objc',
},
{
'type': 'dir',
'path': 'files/src/file-helper/objc',
},
{
'type': 'dir',
'path': 'files/src/httpclient/objc',
},
{
'type': 'dir',
'path': 'files/src/logger/objc',
},
{
'type': 'dir',
'path': 'files/src/shared-data/objc',
},
]
}
| 33.1625 | 82 | 0.392009 | 214 | 2,653 | 4.780374 | 0.317757 | 0.102639 | 0.16129 | 0.234604 | 0.506354 | 0.484848 | 0.414467 | 0.414467 | 0 | 0 | 0 | 0.027796 | 0.430456 | 2,653 | 79 | 83 | 33.582278 | 0.649239 | 0 | 0 | 0.189873 | 0 | 0 | 0.413117 | 0.234075 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012658 | false | 0 | 0 | 0.012658 | 0.025316 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
0047860f4215f9cb9e1a1456ad978d457fffaf49 | 1,821 | py | Python | ecomm/models.py | areebbeigh/greenspace-demo | 0754f3b50e845bd5e50239361239f9b0b8aba42b | [
"Apache-2.0"
] | 1 | 2020-07-06T05:53:12.000Z | 2020-07-06T05:53:12.000Z | ecomm/models.py | areebbeigh/greenspace-demo | 0754f3b50e845bd5e50239361239f9b0b8aba42b | [
"Apache-2.0"
] | 1 | 2020-06-11T15:51:49.000Z | 2020-06-11T16:10:31.000Z | ecomm/models.py | areebbeigh/greenspace-demo | 0754f3b50e845bd5e50239361239f9b0b8aba42b | [
"Apache-2.0"
] | null | null | null | from django.db import models
from django.contrib.auth.models import Group
from django.conf import settings
from django.core.exceptions import ValidationError
from users.models import CustomUser
class Product(models.Model):
    """A nursery product offered for sale.

    A product may only be owned by a user that is a nursery manager;
    this is enforced in full_clean(), which save() runs automatically
    if it has not already been run.
    """

    # Guard so that full_clean() is not run a second time by save()
    # after an explicit full_clean() call.
    is_cleaned = False

    name = models.CharField(max_length=200, null=True)
    price = models.FloatField(null=True)
    description = models.TextField(max_length=200, null=True, blank=True)
    image = models.ImageField()
    date_created = models.DateTimeField(auto_now_add=True, null=True)
    owner = models.ForeignKey(CustomUser, on_delete=models.CASCADE)

    def __str__(self):
        return f'Product<{self.name}, {self.owner}>'

    def get_image_url(self):
        # URL of the uploaded product image
        return self.image.url

    def get_order_count(self):
        # Number of orders placed for this product (reverse FK from Order)
        return self.order_set.count()

    def full_clean(self, *args, **kwargs):
        """Run standard model validation and require a nursery-manager owner.

        Raises:
            ValidationError: if the owner is not a nursery manager (or
                any standard field validation fails).
        """
        super().full_clean(*args, **kwargs)
        if not self.owner.is_nursery_manager():
            raise ValidationError('Product owner must be a nursery manager')
        # we don't want to run a clean more than once
        self.is_cleaned = True

    def save(self, *args, **kwargs):
        """Save the product, running full_clean() first if not already done."""
        if not self.is_cleaned:
            self.full_clean()
        super().save(*args, **kwargs)
class Order(models.Model):
    """An order of a single Product, linking a buyer to a seller."""

    # Allowed values for the status field
    STATUS = (
        ('Pending', 'Pending'),
        ('Canceled', 'Canceled'),
        ('Delivered', 'Delivered'),
    )

    buyer = models.ForeignKey(
        CustomUser, on_delete=models.CASCADE, related_name='orders_placed')
    seller = models.ForeignKey(
        CustomUser, on_delete=models.CASCADE, related_name='orders_received')
    product = models.ForeignKey(Product, on_delete=models.CASCADE)
    status = models.CharField(max_length=200, choices=STATUS, default='Pending')
    date_created = models.DateTimeField(auto_now_add=True, null=True)
| 33.109091 | 80 | 0.686436 | 230 | 1,821 | 5.286957 | 0.395652 | 0.032895 | 0.046053 | 0.069079 | 0.328125 | 0.229441 | 0.229441 | 0.190789 | 0.190789 | 0.190789 | 0 | 0.006173 | 0.199341 | 1,821 | 54 | 81 | 33.722222 | 0.827846 | 0.023613 | 0 | 0.04878 | 0 | 0 | 0.087838 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.121951 | false | 0 | 0.121951 | 0.073171 | 0.682927 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
004cccfb16dc272cd58bb038640a85c14a04639c | 1,130 | py | Python | cloudentries/fusioncompute/lifecycles/fc_rest_client/storage/volumes.py | CloudChef/CloudEntries | a890e2eb96cc537db131e7ca8a0e6e1edc0b6ebd | [
"Apache-2.0"
] | null | null | null | cloudentries/fusioncompute/lifecycles/fc_rest_client/storage/volumes.py | CloudChef/CloudEntries | a890e2eb96cc537db131e7ca8a0e6e1edc0b6ebd | [
"Apache-2.0"
] | null | null | null | cloudentries/fusioncompute/lifecycles/fc_rest_client/storage/volumes.py | CloudChef/CloudEntries | a890e2eb96cc537db131e7ca8a0e6e1edc0b6ebd | [
"Apache-2.0"
] | 1 | 2021-03-26T05:45:00.000Z | 2021-03-26T05:45:00.000Z | # Copyright (c) 2021 Qianyun, Inc. All rights reserved.
from ..common import FusionComputeBase
from cloudify import ctx
class VolumesClient(FusionComputeBase):
IDENTIFIER = 'volumes'
def list(self, urn, limit=100, offset=0):
uri_prefix = '/'.join([self.urn2uri(urn), self.IDENTIFIER])
pagination = "limit={limit}&offset={offset}".format(limit=limit, offset=offset)
uri = "?".join([uri_prefix, pagination])
return self.rest_client.get(uri)
def create(self, urn, config):
uri = "/".join([self.urn2uri(urn), self.IDENTIFIER])
return self.rest_client.post(uri, data=config)
def delete(self, urn, is_format=0):
uri_perfix = self.urn2uri(urn)
pagination = "isFormat={}".format(is_format)
uri = "?".join([uri_perfix, pagination])
return self.rest_client.delete(uri)
def expandvol(self, urn, config):
action = '/action/expandvol'
server_uri = self.urn2uri(urn)
uri = server_uri + action
ctx.logger.info('uri: {}, data: {}'.format(uri, config))
return self.rest_client.post(uri, data=config)
| 35.3125 | 87 | 0.645133 | 139 | 1,130 | 5.158273 | 0.352518 | 0.039052 | 0.078103 | 0.111576 | 0.276151 | 0.192469 | 0.103208 | 0.103208 | 0 | 0 | 0 | 0.014574 | 0.210619 | 1,130 | 31 | 88 | 36.451613 | 0.789238 | 0.046903 | 0 | 0.086957 | 0 | 0 | 0.07907 | 0.026977 | 0 | 0 | 0 | 0 | 0 | 1 | 0.173913 | false | 0 | 0.086957 | 0 | 0.521739 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
0053cb11deedec1d753b8d56591391ebfa9a5dad | 372 | py | Python | django/models/database_models/migrations/0007_auto_20191219_0354.py | djangojeng-e/TIL | bdbe1dfb6ebc48b89067fddda195227cca64b8dc | [
"MIT"
] | null | null | null | django/models/database_models/migrations/0007_auto_20191219_0354.py | djangojeng-e/TIL | bdbe1dfb6ebc48b89067fddda195227cca64b8dc | [
"MIT"
] | null | null | null | django/models/database_models/migrations/0007_auto_20191219_0354.py | djangojeng-e/TIL | bdbe1dfb6ebc48b89067fddda195227cca64b8dc | [
"MIT"
] | null | null | null | # Generated by Django 2.2.8 on 2019-12-19 03:54
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('database_models', '0006_ox'),
]
operations = [
migrations.AlterModelOptions(
name='ox',
options={'ordering': ['horn_length'], 'verbose_name_plural': '뿔의 길이'},
),
]
| 20.666667 | 82 | 0.594086 | 40 | 372 | 5.4 | 0.825 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.070111 | 0.271505 | 372 | 17 | 83 | 21.882353 | 0.726937 | 0.120968 | 0 | 0 | 1 | 0 | 0.206154 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.090909 | 0 | 0.363636 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
00541a090b01512eeb7f1e9e94b43fb4d3babd7b | 5,782 | py | Python | Uncategorized-pages.py | Turkce-Vikipedi-Yazilim-Deposu/Uncategorized-pages | 44fc4776404aac98fb66a00f4a3048ea173470a6 | [
"MIT"
] | null | null | null | Uncategorized-pages.py | Turkce-Vikipedi-Yazilim-Deposu/Uncategorized-pages | 44fc4776404aac98fb66a00f4a3048ea173470a6 | [
"MIT"
] | 1 | 2021-02-15T10:47:21.000Z | 2021-02-15T10:47:21.000Z | Uncategorized-pages.py | Turkce-Vikipedi-Yazilim-Deposu/Uncategorized-pages | 44fc4776404aac98fb66a00f4a3048ea173470a6 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# !/usr/bin/python
import random
import re
import requests
import mavri
wiki = 'tr.wikipedia'
wikiS = 'trwiki'
xx = mavri.login(wiki, 'Mavrikant Bot')
catNS = requests.get(
'https://' + wiki + '.org/w/api.php?format=json&utf8=&action=query&meta=siteinfo&siprop=namespaces').json()[
'query']['namespaces']['14']['*']
def add_category(page):
print page
content = mavri.content_of_page(wiki, page)
if content:
if re.findall(r'\[\[\s?' + catNS + '[^\]]*\]\]', content) != []:
content = re.sub(r'\{\{\s?[Kk]ategorisiz[^\}]*\}\}\s?\n?', '', content)
content = re.sub(r'\{\{\s?[Uu]ncategorized[^\}]*\}\}\s?\n?', '', content)
return mavri.change_page(wiki, page, content, '- Kategorisiz Şablonu', xx)
entity = mavri.wikibase_item(wiki, page)
langs = mavri.wbgetlangsofentity(entity)
langs = re.findall(r'\'([a-z]{2,3})wiki\'', str(langs))
if 'tr' in langs: langs.remove('tr')
print langs
if langs:
lang = random.choice(langs)
Swiki = lang + '.wikipedia'
Slang = lang + 'wiki'
print Swiki
print Slang
Spage = mavri.wbgetlanglink(entity, Slang)
print Spage
Scat = mavri.categories_on_page(Swiki, Spage)
print Scat
ScatNS = requests.get(
'https://' + Swiki + '.org/w/api.php?format=json&utf8=&action=query&meta=siteinfo&siprop=namespaces').json()[
'query']['namespaces']['14']['*']
cat_to_add = []
for cat in Scat:
ncat = mavri.wbgetlanglink(mavri.wikibase_item(Swiki, ScatNS + ':' + cat), 'trwiki')
print cat + ' -> ' + ncat
if ncat != '':
cat_to_add.insert(0, ncat)
print cat_to_add
content = mavri.content_of_page(wiki, page)
appendtext = ''
for cat in cat_to_add:
if re.findall(r'\[\[\s?' + cat + '\s?\|?[^\[\]]*\]\]', content) == []:
appendtext += '\n[[' + cat + ']]'
NUM = str(len(cat_to_add))
if appendtext:
content += appendtext
content = re.sub(r'\{\{\s?[Kk]ategorisiz[^\}]*\}\}\s?\n?', '', content)
content = re.sub(r'\{\{\s?[Uu]ncategorized[^\}]*\}\}\s?\n?', '', content)
diff = \
mavri.change_page(wiki, page, content, '+ ' + NUM + catNS + ', Kaynak=' + Slang, xx).json()['edit'][
'newrevid']
mavri.appendtext_on_page(wiki, 'Kullanıcı:Mavrikant_Bot/Log/Kategorisiz', '\n# [[Special:Diff/' + str(
diff) + '|' + page + ']] (+ ' + NUM + catNS + ', Kaynak=' + Slang + ')',
'[[Special:Diff/' + str(
diff) + '|' + page + ']] (+ ' + NUM + catNS + ', Kaynak=' + Slang + ')',
xx)
else:
content = mavri.content_of_page(wiki, page)
if re.findall(r'\[\[\s?' + catNS + '[^\]]*\]\]', content) == [] and re.findall(
r'\{\{\s?[Kk]ategorisiz[^\}]*\}\}', content) == []:
content = re.sub(r'\{\{\s?[Kk]ategorisiz[^\}]*\}\}\s?\n?', '', content)
content = re.sub(r'\{\{\s?[Uu]ncategorized[^\}]*\}\}\s?\n?', '', content)
diff = mavri.change_page(wiki, page,
content + '\n\n{{Kategorisiz|{{kopyala:CURRENTMONTHNAME}} {{kopyala:CURRENTYEAR}}}} ',
'+ Kategorisiz Şablonu', xx).json()['edit']['newrevid']
mavri.appendtext_on_page(wiki, 'Kullanıcı:Mavrikant_Bot/Log/Kategorisiz',
'\n# [[Special:Diff/' + str(diff) + '|' + page + ']] (Kategorisiz)',
'[[Special:Diff/' + str(diff) + '|' + page + ']] (Kategorisiz)', xx)
else:
content = mavri.content_of_page(wiki, page)
if re.findall(r'\[\[\s?' + catNS + '[^\]]*\]\]', content) == [] and re.findall(
r'\{\{\s?[Kk]ategorisiz[^\}]*\}\}', content) == []:
content = re.sub(r'\{\{\s?[Kk]ategorisiz[^\}]*\}\}\s?\n?', '', content)
content = re.sub(r'\{\{\s?[Uu]ncategorized[^\}]*\}\}\s?\n?', '', content)
diff = mavri.change_page(wiki, page,
content + '\n\n{{Kategorisiz|{{kopyala:CURRENTMONTHNAME}} {{kopyala:CURRENTYEAR}}}} ',
'+ Kategorisiz Şablonu', xx).json()['edit']['newrevid']
mavri.appendtext_on_page(wiki, 'Kullanıcı:Mavrikant_Bot/Log/Kategorisiz',
'\n# [[Special:Diff/' + str(diff) + '|' + page + ']] (Kategorisiz)',
'[[Special:Diff/' + str(diff) + '|' + page + ']] (Kategorisiz)', xx)
# Section 1
# Pass 1: process every page currently listed in the on-wiki maintenance
# category 'Kategori:Kategorisiz'.
cats = mavri.pages_on_category(wiki, 'Kategori:Kategorisiz')
for line in cats:
    page = line['title']
    add_category(page)
# Section 2
# Pass 2: process the Special:UncategorizedPages report (up to 500 entries).
# The report's cached timestamp is persisted on a wiki log page so the same
# cached report is not processed twice.
content = requests.get(
    'https://' + wiki + '.org/w/api.php?action=query&format=json&utf8&list=querypage&qppage=Uncategorizedpages&qplimit=500')
datelog = 'Kullanıcı:Mavrikant_Bot/Log/Kategorisiz/Tarih'
olddate = mavri.content_of_page(wiki, datelog)
newdate = content.json()['query']['querypage']['cachedtimestamp']
if olddate != newdate:
    mavri.change_page(wiki, datelog, newdate, newdate, xx)
    # NOTE(review): indentation was lost in this copy; results are processed
    # only when the cached report is fresh, which matches the timestamp-guard
    # intent above — confirm against the original file.
    for line in content.json()['query']['querypage']['results']:
        page = line['title']
        add_category(page)
| 49.418803 | 131 | 0.479765 | 576 | 5,782 | 4.739583 | 0.208333 | 0.010256 | 0.035165 | 0.038095 | 0.610623 | 0.578022 | 0.54652 | 0.527839 | 0.496703 | 0.474725 | 0 | 0.004091 | 0.32359 | 5,782 | 116 | 132 | 49.844828 | 0.69394 | 0.010031 | 0 | 0.383838 | 0 | 0.030303 | 0.26613 | 0.161042 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.040404 | null | null | 0.080808 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc36c715b15cef873692996da21049266c1532d0 | 7,907 | py | Python | src/ipycbm/sources/rest_api.py | VP-GEO/cbm | 4ed229f6b6455435b6d032deb8a39dba4ecee7a2 | [
"BSD-3-Clause"
] | null | null | null | src/ipycbm/sources/rest_api.py | VP-GEO/cbm | 4ed229f6b6455435b6d032deb8a39dba4ecee7a2 | [
"BSD-3-Clause"
] | null | null | null | src/ipycbm/sources/rest_api.py | VP-GEO/cbm | 4ed229f6b6455435b6d032deb8a39dba4ecee7a2 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This file is part of CbM (https://github.com/ec-jrc/cbm).
# Author : Guido Lemoine, Konstantinos Anastasakis
# Credits : GTCAP Team
# Copyright : 2021 European Commission, Joint Research Centre
# License : 3-Clause BSD
from src.ipycbm.utils import config
import requests
def ploc(area, year, lon, lat, geom=False):
    """Query a parcel by geographic location.

    :param area: Area/dataset code; 'es_ns' selects the Nour subset table.
    :param year: Campaign year, appended to the table name.
    :param lon: Longitude of the query point.
    :param lat: Latitude of the query point.
    :param geom: Include the parcel geometry in the response when True.
    :return: Raw response body (bytes) from the REST API.
    """
    table = f'es{year}_nour_subset' if area == 'es_ns' else f'{area}{year}'
    api_url, api_user, api_pass = config.credentials('api')
    url = f"{api_url}/query/parcelByLocation?parcels={table}&lon={lon}&lat={lat}"
    if geom is True:
        url += "&withGeometry=True"
    return requests.get(url, auth=(api_user, api_pass)).content
def pid(area, year, pid, geom=False):
    """Query a single parcel by its identifier.

    :param area: Area/dataset code; 'es_ns' selects the Nour subset table.
    :param year: Campaign year, appended to the table name.
    :param pid: Parcel identifier.
    :param geom: Include the parcel geometry in the response when True.
    :return: Raw response body (bytes) from the REST API.
    """
    table = f'es{year}_nour_subset' if area == 'es_ns' else f'{area}{year}'
    api_url, api_user, api_pass = config.credentials('api')
    url = f"{api_url}/query/parcelById?parcels={table}&parcelid={pid}"
    if geom is True:
        url += "&withGeometry=True"
    return requests.get(url, auth=(api_user, api_pass)).content
def ppoly(area, year, polygon, geom=False, only_ids=True):
    """Query the parcels intersecting *polygon*.

    :param area: Area/dataset code; 'es_ns' selects the Nour subset table.
    :param year: Campaign year, appended to the table name.
    :param polygon: Polygon string as expected by the API endpoint.
    :param geom: Include the parcel geometries in the response when True.
    :param only_ids: Return only parcel ids when True.
    :return: Raw response body (bytes) from the REST API.
    """
    table = f'es{year}_nour_subset' if area == 'es_ns' else f'{area}{year}'
    api_url, api_user, api_pass = config.credentials('api')
    url = f"{api_url}/query/parcelsByPolygon?parcels={table}&polygon={polygon}"
    if geom is True:
        url += "&withGeometry=True"
    if only_ids is True:
        url += "&only_ids=True"
    return requests.get(url, auth=(api_user, api_pass)).content
def pts(area, year, pid, tstype, band=''):
    """Fetch a parcel's time series.

    :param area: Area of interest code; 'es_ns' is mapped to 'es'.
    :param year: Campaign year.
    :param pid: Parcel identifier.
    :param tstype: Time-series type as expected by the endpoint.
    :param band: Optional band filter; passed through verbatim (may be '').
    :return: Raw response body (bytes) from the REST API.
    """
    if area == 'es_ns':
        area = 'es'
    api_url, api_user, api_pass = config.credentials('api')
    url = (f"{api_url}/query/parcelTimeSeries?aoi={area}&year={year}"
           f"&pid={pid}&tstype={tstype}&band={band}")
    return requests.get(url, auth=(api_user, api_pass)).content
def cbl(lon, lat, start_date, end_date, bands=None, lut=None, chipsize=None):
    """Request image chips by location.

    :param lon: Longitude of the chip centre.
    :param lat: Latitude of the chip centre.
    :param start_date: Start of the date range (string, as the API expects).
    :param end_date: End of the date range.
    :param bands: Optional list of band names; joined with '_' for the query.
    :param lut: Optional look-up-table parameter; omitted when empty/None.
    :param chipsize: Optional chip size.
    :return: The raw requests.Response object.
    """
    api_url, api_user, api_pass = config.credentials('api')
    requrl = """{}/query/chipsByLocation?lon={}&lat={}&start_date={}&end_date={}"""
    # Bug fix: the original did '_'.join(bands) unconditionally, which raised
    # TypeError for the default bands=None; and tested `lut != ''`, which is
    # true for the default lut=None and appended the literal text "lut=None".
    band = '_'.join(bands) if bands else None
    if band is not None:
        requrl = f"{requrl}&band={band}"
    if chipsize is not None:
        requrl = f"{requrl}&chipsize={chipsize}"
    if lut:
        requrl = f"{requrl}&lut={lut}"
    # print(requrl.format(api_url, lon, lat, start_date, end_date))
    response = requests.get(requrl.format(api_url, lon, lat,
                                          start_date, end_date),
                            auth=(api_user, api_pass))
    return response
def rcbl(parcel, start_date, end_date, bands, sat, chipsize, filespath):
    """Download raw image chips, band by band, centred on a parcel.

    For each band: queries the rawChipByLocation endpoint at the parcel's
    centroid (reprojected to EPSG:4326), saves the returned table as CSV in
    *filespath*, then streams every listed GeoTIFF chip to disk.

    :param parcel: Parcel record (dict-like with 'geom', 'srid', 'ogc_fid'
        lists — presumably a single-row result; confirm against the caller).
    :param start_date: Start of the date range.
    :param end_date: End of the date range.
    :param bands: Iterable of band names to download.
    :param sat: NOTE(review): accepted but never used in this function.
    :param chipsize: Optional chip size appended to the query when not None.
    :param filespath: Output directory/prefix; created if missing.
    """
    import os
    import pandas as pd
    from osgeo import osr, ogr
    import time
    start = time.time()
    api_url, api_user, api_pass = config.credentials('api')
    for band in bands:
        requrl = """{}/query/rawChipByLocation?lon={}&lat={}&start_date={}&end_date={}"""
        if band is not None:
            # band = '_'.join(band)
            # band = band.replace(' ', '')
            requrl = f"{requrl}&band={band}"
        if chipsize is not None:
            requrl = f"{requrl}&chipsize={chipsize}"
        # Create a valid geometry from the returned JSON withGeometry
        geom = ogr.CreateGeometryFromJson(parcel.get('geom')[0])
        source = osr.SpatialReference()
        source.ImportFromEPSG(parcel.get('srid')[0])
        # Assign this projection to the geometry
        geom.AssignSpatialReference(source)
        target = osr.SpatialReference()
        target.ImportFromEPSG(4326)
        transform = osr.CoordinateTransformation(source, target)
        # And get the lon, lat for its centroid, so that we can center the chips on
        # the parcel
        centroid = geom.Centroid()
        centroid.Transform(transform)
        # Use pid for next request
        pid = parcel['ogc_fid'][0]
        # cropname = parcel['cropname'][0]
        # Set up the rawChip request
        # NOTE(review): format() receives 7 args for 5 '{}' placeholders —
        # the extra band/chipsize args are silently ignored (already baked
        # into the template above). Harmless, but worth tidying.
        cen_x, cen_y = str(centroid.GetX()), str(centroid.GetY())
        response = requests.get(requrl.format(api_url, cen_y, cen_x, start_date,
                                              end_date, band, chipsize),
                                auth=(api_user, api_pass))
        # Directly create a pandas DataFrame from the json response
        df = pd.read_json(response.content)
        os.makedirs(os.path.dirname(filespath), exist_ok=True)
        df_file = f'{filespath}{pid}_images_list.{band}.csv'
        df.to_csv(df_file, index=True, header=True)
        # print(f"The response table is saved to: {df_file}")
        # Download the GeoTIFFs that were just created in the user cache
        for c in df.chips:
            url = f"{api_url}{c}"
            res = requests.get(url, stream=True)
            outf = f"{filespath}{c.split('/')[-1]}"
            # print(f"Downloading {c.split('/')[-1]}")
            with open(outf, "wb") as handle:
                for chunk in res.iter_content(chunk_size=512):
                    if chunk:  # filter out keep-alive new chunks
                        handle.write(chunk)
        print(f"Images for band '{band}', for the selected dates are downloaded.")
        # if len(df.index) != 0:
        #     print(f"All GeoTIFFs for band '{band}' are ",
        #           f"downloaded in the folder: '{filespath}'")
    print("\n------Total time------")
    print(f"Total time required for {len(bands)} bands: {time.time() - start} seconds.")
def clouds(geom):
    """Compute per-image SCL (scene classification) pixel histograms over *geom*.

    Scans the current directory for '*.SCL.tif' GeoTIFFs, reprojects the
    parcel geometry into each image's CRS when they differ, and builds a
    categorical zonal histogram per image.

    :param geom: An osgeo.ogr geometry with a spatial reference assigned.

    NOTE(review): `pid` (used as a dummy feature-property key below) is not
    defined in this function — it would raise NameError unless a module-level
    `pid` happens to exist at call time; confirm and pass it explicitly.
    NOTE(review): `histogram` is computed per file but never returned or
    stored — presumably follow-up code was planned.
    """
    import glob
    import json
    import rasterio
    from osgeo import osr
    from rasterstats import zonal_stats
    # Check whether our parcel is cloud free
    # We should have a list of GeoTIFFs ending with .SCL.tif
    tiflist = glob.glob('*.SCL.tif')
    for t in tiflist:
        with rasterio.open(t) as src:
            affine = src.transform
            CRS = src.crs
            data = src.read(1)
        # Reproject the parcel geometry in the image crs
        imageCRS = int(str(CRS).split(':')[-1])
        # Cross check with the projection of the geometry
        # This needs to be done for each image, because the parcel could be in
        # a straddle between (UTM) zones
        geomCRS = int(geom.GetSpatialReference().GetAuthorityCode(None))
        if geomCRS != imageCRS:
            target = osr.SpatialReference()
            target.ImportFromEPSG(imageCRS)
            source = osr.SpatialReference()
            source.ImportFromEPSG(geomCRS)
            transform = osr.CoordinateTransformation(source, target)
            geom.Transform(transform)
        # Format as a feature collection (with only 1 feature)
        # and extract the histogram
        features = {"type": "FeatureCollection",
                    "features": [{"type": "feature",
                                  "geometry": json.loads(geom.ExportToJson()),
                                  "properties": {"pid": pid}}]}
        zs = zonal_stats(features, data, affine=affine, prefix="",
                         nodata=0, categorical=True, geojson_out=True)
        # This has only one record
        properties = zs[0].get('properties')
        # pid was used as a dummy key to make sure the histogram
        # values are in 'properties'
        del properties['pid']
        histogram = {int(float(k)): v for k, v in properties.items()}
        # print(t, histogram)
| 36.776744 | 89 | 0.587201 | 969 | 7,907 | 4.703818 | 0.296182 | 0.018429 | 0.026327 | 0.036858 | 0.383063 | 0.289601 | 0.273146 | 0.245722 | 0.245722 | 0.180562 | 0 | 0.004603 | 0.28557 | 7,907 | 214 | 90 | 36.948598 | 0.802266 | 0.193246 | 0 | 0.362963 | 0 | 0.007407 | 0.162094 | 0.087985 | 0 | 0 | 0 | 0 | 0 | 1 | 0.051852 | false | 0.088889 | 0.111111 | 0 | 0.2 | 0.022222 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
cc3769add829a8e19c188057b46207ee4e43828c | 2,019 | py | Python | parsers/humbleparser.py | FilipDominec/gayplot | aabeee33f9b594952e88e31d94eead20b21ed674 | [
"MIT"
] | 6 | 2016-09-26T16:30:46.000Z | 2020-04-29T17:30:01.000Z | parsers/humbleparser.py | FilipDominec/gayplot | aabeee33f9b594952e88e31d94eead20b21ed674 | [
"MIT"
] | 5 | 2016-08-25T15:51:35.000Z | 2018-06-14T17:12:42.000Z | parsers/humbleparser.py | FilipDominec/gayplot | aabeee33f9b594952e88e31d94eead20b21ed674 | [
"MIT"
] | 4 | 2017-06-07T14:40:29.000Z | 2019-05-09T14:17:55.000Z | #!/usr/bin/python3
#-*- coding: utf-8 -*-
"""
A simplistic attempt to parse simple procedural languages that control e.g. MOVPE apparatuses gas flow (not finished)
"""
import sys,re
# Parser state: `time` accumulates elapsed recipe seconds across lines.
time = 0
labels = {}     # label line (ending in '{') -> line number, for future GOTO support
variables = {}  # last value assigned to each recipe variable via "<var> to <value>"
tableheader = []  ## TODO TODO efficient accumulation of data w/ possibility of new variables in the middle of EPI recipe
tablevalues = []
tabletiming = []
with open(sys.argv[1], encoding='latin1') as epifile:
    lines = epifile.readlines()
    for n, line in enumerate(lines):
        #line = '1:020 " Setup: lateral growth", NH3_2.run close, TMGa_1.run close, N2.line close, N2.run open,'
        ## Detect and remember labels
        if len(line.strip()) >= 1 and line.strip()[-1] == '{':
            labels[line.strip()] = n
            continue
        # Split each line into:  <time>  "<name>"  <commands...>  # comment
        timematch = re.match('^\\s*\\d?\\d?:?\\d+', line)
        timedelim = timematch.end() if timematch else 0
        namematch = re.search('"[^"]*"', line)
        namedelim = namematch.end() if namematch else timedelim
        cmntmatch = re.search('#', line)
        cmntdelim = cmntmatch.end() - 1 if cmntmatch else 1000000
        timestr, namestr, cmdsstr = line[:timedelim].strip(), line[timedelim:namedelim].strip(), line[namedelim:cmntdelim].strip()
        print("DEBUG: timestr, = ", timestr,)
        print("DEBUG: namestr, = ", namestr,)
        print("DEBUG: cmdsstr = ", cmdsstr)
        for cmd in [c.strip().strip(';') for c in cmdsstr.split(',') if c.strip() != '']:
            print(time, " DEBUG: cmd = ", cmd)
            if ' to ' in cmd:
                variable, value = [c.strip() for c in cmd.split(' to ', 1)]
                # Bug fix: `variables` was never defined and the original bare
                # expression `variables[variable]` raised NameError for any
                # "<var> to <value>" command; record the assignment instead.
                variables[variable] = value
        ## TODO if cmd == "GOTO OR WHATEVER": n = labels[JUMPTO]; continue
        ## Advance the time
        if timematch:
            # "mm:ss" adds minutes*60+seconds; a plain number adds seconds.
            if ':' in timestr: time += 60*int(timestr.split(':')[0]) + int(timestr.split(':')[1])
            else: time += int(timestr);
| 38.826923 | 131 | 0.549777 | 238 | 2,019 | 4.655462 | 0.453782 | 0.024368 | 0.018051 | 0.019856 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.021171 | 0.298167 | 2,019 | 51 | 132 | 39.588235 | 0.760762 | 0.232293 | 0 | 0 | 0 | 0 | 0.077074 | 0 | 0 | 0 | 0 | 0.019608 | 0 | 1 | 0 | false | 0 | 0.033333 | 0 | 0.033333 | 0.133333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc387d9a15dc07a58a1aef7e27c7f9ae23b2fbd9 | 1,735 | py | Python | Back-End/src/Projects/migrations/0001_initial.py | steve-njuguna-k/Django-Angular-Projects-Manager | 569348d1cdf29e9789526db180d83ad969302bec | [
"MIT"
] | 1 | 2022-02-01T17:53:54.000Z | 2022-02-01T17:53:54.000Z | Back-End/src/Projects/migrations/0001_initial.py | steve-njuguna-k/Django-Angular-Projects-Manager | 569348d1cdf29e9789526db180d83ad969302bec | [
"MIT"
] | null | null | null | Back-End/src/Projects/migrations/0001_initial.py | steve-njuguna-k/Django-Angular-Projects-Manager | 569348d1cdf29e9789526db180d83ad969302bec | [
"MIT"
] | null | null | null | # Generated by Django 3.2.7 on 2021-09-13 14:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the Projects app: creates Categories,
    Members, Tasks and Projects. Tasks and Projects reference Members, and
    Projects also reference Categories, all with CASCADE deletes."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Categories',
            fields=[
                ('CategoryID', models.AutoField(primary_key=True, serialize=False)),
                ('CategoryName', models.CharField(max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='Members',
            fields=[
                ('MemberID', models.AutoField(primary_key=True, serialize=False)),
                ('MemberName', models.CharField(max_length=50)),
            ],
        ),
        migrations.CreateModel(
            name='Tasks',
            fields=[
                ('TaskID', models.AutoField(primary_key=True, serialize=False)),
                ('TaskName', models.CharField(max_length=50)),
                ('MemberAllocated', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Projects.members')),
            ],
        ),
        migrations.CreateModel(
            name='Projects',
            fields=[
                ('ProjectID', models.AutoField(primary_key=True, serialize=False)),
                ('ProjectName', models.CharField(max_length=50)),
                ('Budget', models.IntegerField()),
                ('DueBy', models.DateField()),
                ('MemberAllocated', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Projects.members')),
                ('ProjectCategory', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Projects.categories')),
            ],
        ),
    ]
| 35.408163 | 126 | 0.564265 | 152 | 1,735 | 6.368421 | 0.381579 | 0.041322 | 0.057851 | 0.090909 | 0.577479 | 0.52376 | 0.52376 | 0.346074 | 0.240702 | 0.240702 | 0 | 0.018977 | 0.301441 | 1,735 | 48 | 127 | 36.145833 | 0.779703 | 0.025937 | 0 | 0.439024 | 1 | 0 | 0.125 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.04878 | 0 | 0.146341 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc4119e45e98875e9f656da518c2b1e11eebeacb | 5,483 | py | Python | elro/device.py | dib0/elro_connects | 19412a5be4f924f2ccf82801999cc7fc639155b3 | [
"MIT"
] | 16 | 2019-10-12T12:50:36.000Z | 2022-01-24T17:39:36.000Z | elro/device.py | depuits/elro_connects | eb2cf1df67ef0ade811e278b52520449e0941d23 | [
"MIT"
] | 23 | 2021-03-01T16:40:39.000Z | 2022-03-25T12:54:21.000Z | elro/device.py | depuits/elro_connects | eb2cf1df67ef0ade811e278b52520449e0941d23 | [
"MIT"
] | 7 | 2020-12-27T20:18:20.000Z | 2022-01-21T15:39:18.000Z | from enum import Enum
from abc import ABC, abstractmethod
import logging
import json
import trio
class DeviceType(Enum):
    """
    The DeviceType defines which kind of Elro device this is.

    Values are the 4-character type codes carried in the hub payload's
    "device_name" field (see create_device_from_data / Device.update).
    """
    CO_ALARM = "0000"
    WATER_ALARM = "0004"
    HEAT_ALARM = "0003"
    FIRE_ALARM = "0005"
    DOOR_WINDOW_SENSOR = "0101"
class Device(ABC):
    """
    A Device is an Elro device that is connected to the system.

    Subclasses implement update_specifics() to interpret the type-specific
    part of a status payload. Two trio events publish state changes:
    `updated` fires on any attribute change and `alarm` on an alarm signal.
    """
    def __init__(self, device_id, device_type):
        """
        Constructor
        :param device_id: The device ID
        :param device_type: The device type
        """
        self.id = device_id
        self._name = ""
        # -1 marks "no battery reading received yet"
        self._battery_level = -1
        self._device_state = ""
        self.device_type = device_type
        # Each event is replaced with a fresh trio.Event after it fires
        # (see _send_update_event / send_alarm_event), so waiters must
        # re-read the attribute between waits.
        self.updated = trio.Event()
        self.alarm = trio.Event()

    @property
    def name(self):
        """
        The name of the device
        :return: The name
        """
        return self._name

    @name.setter
    def name(self, name):
        # Every setter notifies listeners via the `updated` event.
        self._name = name
        self._send_update_event()

    @property
    def device_state(self):
        """
        The current state of the device as a string
        :return: The device state
        """
        return self._device_state

    @device_state.setter
    def device_state(self, device_state):
        self._device_state = device_state
        self._send_update_event()

    @property
    def battery_level(self):
        """
        The current battery level of the device in percent.
        :return: The battery level
        """
        return self._battery_level

    @battery_level.setter
    def battery_level(self, battery_level):
        self._battery_level = battery_level
        self._send_update_event()

    def _send_update_event(self):
        """
        Triggers the self.updated event
        """
        self.updated.set()
        # trio events are one-shot; swap in a fresh one for the next change.
        self.updated = trio.Event()

    def send_alarm_event(self):
        """
        Triggers the self.alarm event.
        """
        self.alarm.set()
        self.alarm = trio.Event()

    def update(self, data):
        """
        Updates this device with the data received from the actual device
        :param data: The data dict received from the actual device
        """
        self.device_type = data["data"]["device_name"]
        # set battery status
        # Characters 2:4 of device_status hold the battery level as a hex byte.
        batt = int(data["data"]["device_status"][2:4], 16)
        self.battery_level = batt
        self.device_state = "Unknown"
        self.update_specifics(data)
        # NOTE(review): the property setters above already fired `updated`;
        # this final trigger guarantees one notification after the full update.
        self._send_update_event()

    @abstractmethod
    def update_specifics(self, data):
        """
        An abstract method that is called to update type specific things.
        :param data: The data dict received from the actual device
        """
        pass

    def __str__(self):
        return f"<{self.device_type}: {self.name} (id: {self.id})>"

    def __repr__(self):
        return str(self)

    @property
    def json(self):
        """
        A json representation of the device.
        :return: A str containing json.
        """
        return json.dumps({"name": self.name,
                           "id": self.id,
                           "type": self.device_type,
                           "state": self.device_state,
                           "battery": self.battery_level})
class WindowSensor(Device):
    """
    A sensor that can detect open/close state of a window.
    """
    def __init__(self, device_id):
        """
        Constructor
        :param device_id: The device ID
        """
        super().__init__(device_id, "0101")

    def update_specifics(self, data):
        """
        Updates the window "Open"/"Closed" state.

        :param data: The data dict received from the actual device
        :raises AttributeError: If the payload is not for a door/window sensor.
        """
        if data["data"]["device_name"] != DeviceType.DOOR_WINDOW_SENSOR.value:
            # Bug fix: the original constructed this exception but never
            # raised it, so mismatched payloads were processed as valid.
            raise AttributeError(f"Tried to update a window sensor to type "
                                 f"{DeviceType(data['data']['device_name'])}")
        # Status byte "55" = open, "AA" = closed; anything else leaves the
        # previous state untouched.
        status = data["data"]["device_status"][4:-2]
        if status == "55":
            logging.debug("Door/window id " + str(self.id) + " open!")
            self.device_state = "Open"
        elif status == "AA":
            logging.debug("Door/window id " + str(self.id) + " closed!")
            self.device_state = "Closed"
class AlarmSensor(Device):
    """
    A device that can ring an alarm (HeatAlarm, WaterAlarm, FireAlarm, COAlarm).
    """
    def __init__(self, device_id, device_type):
        """
        Constructor
        :param device_id: The device ID
        :param device_type: The device type
        """
        super().__init__(device_id, device_type)

    def update_specifics(self, data):
        """
        Updates the alarm state of the device.

        Status byte "BB" means an active alarm, "AA" means normal operation;
        any other code leaves the previous state unchanged.

        :param data: The data dict received from the actual device
        """
        status_code = data["data"]["device_status"][4:-2]
        state_by_code = {"BB": "Alarm", "AA": "Normal"}
        if status_code in state_by_code:
            self.device_state = state_by_code[status_code]
def create_device_from_data(data):
    """
    Factory method to create a device from a data dict.

    :param data: The data dict received from the actual device
    :return: A WindowSensor for door/window payloads, otherwise an AlarmSensor.
    """
    info = data["data"]
    if info["device_name"] == DeviceType.DOOR_WINDOW_SENSOR.value:
        return WindowSensor(info["device_ID"])
    return AlarmSensor(info["device_ID"], info["device_name"])
cc41a55ea2dfa59c42e722abf1fec1c6d875b50b | 1,301 | py | Python | accounts/test_forms.py | vmcggh18/bits_tracker | 7c09aae321efb13979bed274d973c77319ce795e | [
"PostgreSQL"
] | null | null | null | accounts/test_forms.py | vmcggh18/bits_tracker | 7c09aae321efb13979bed274d973c77319ce795e | [
"PostgreSQL"
] | 7 | 2020-06-05T19:50:41.000Z | 2022-03-11T23:39:39.000Z | accounts/test_forms.py | vmcggh18/bits_tracker | 7c09aae321efb13979bed274d973c77319ce795e | [
"PostgreSQL"
] | 1 | 2019-02-19T15:30:16.000Z | 2019-02-19T15:30:16.000Z | from django.test import TestCase
from .forms import UserLoginForm, UserRegistrationForm
from django import forms
from django.contrib.auth.models import User
class TestTrackerUserLoginForm(TestCase):
    """Validation tests for the accounts login and registration forms."""

    def test_can_create_an_item_with_just_a_username(self):
        # instantiate the form from a dictionary; a username alone must not
        # validate (the password field is missing)
        form = UserLoginForm({'username': 'Create Tests'})
        self.assertFalse(form.is_valid())

    def test_correct_message_for_missing_name(self):
        # An empty username must produce the standard required-field error.
        form = UserLoginForm({'username': ''})
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors['username'], [u'This field is required.'])

    def test_registerationForm_data_is_valid(self):
        # NOTE(review): despite the name, this asserts that errors ARE present
        # — the data uses 'password'/'confirm' keys rather than the
        # password1/password2 keys exercised below and omits a username.
        # Confirm the intent (negative test?) and consider renaming; also
        # 'registeration' is a typo for 'registration'.
        valid_data = {"email": "user@example.com",
                      "password": "secret",
                      "confirm": "secret"}
        form = UserRegistrationForm(data=valid_data)
        form.is_valid()
        self.assertTrue(form.errors)

    def test_passwords_must_match(self):
        # Mismatched password1/password2 must fail with the custom message.
        invalid_data = {"email": "user@example.com",
                        "password1": "secret",
                        "password2": "anothersecret"}
        form = UserRegistrationForm(data=invalid_data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors['password2'], ['Passwords must match'])
| 35.162162 | 79 | 0.663336 | 140 | 1,301 | 5.964286 | 0.428571 | 0.041916 | 0.052695 | 0.075449 | 0.208383 | 0.122156 | 0.122156 | 0.122156 | 0.122156 | 0 | 0 | 0.002973 | 0.224443 | 1,301 | 37 | 80 | 35.162162 | 0.824579 | 0.032283 | 0 | 0.115385 | 0 | 0 | 0.154091 | 0 | 0 | 0 | 0 | 0 | 0.230769 | 1 | 0.153846 | false | 0.192308 | 0.153846 | 0 | 0.346154 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
cc42421a931372ab5f620c8ee6b79cd292923921 | 986 | py | Python | problems/tests/test_fizz_buzz.py | mdu-alt/coding_challenges_python | 171876129b17b5eccd8dd20c7f5b61b3e0e6ef90 | [
"MIT"
] | null | null | null | problems/tests/test_fizz_buzz.py | mdu-alt/coding_challenges_python | 171876129b17b5eccd8dd20c7f5b61b3e0e6ef90 | [
"MIT"
] | null | null | null | problems/tests/test_fizz_buzz.py | mdu-alt/coding_challenges_python | 171876129b17b5eccd8dd20c7f5b61b3e0e6ef90 | [
"MIT"
] | null | null | null | from problems.fizz_buzz import FizzBuzz
def test_fizz_buzz():
    """FizzBuzz.compute() must return the classic lowercase sequence for 1..100."""
    expected = []
    for n in range(1, 101):
        if n % 15 == 0:
            expected.append('fizzbuzz')
        elif n % 3 == 0:
            expected.append('fizz')
        elif n % 5 == 0:
            expected.append('buzz')
        else:
            expected.append(str(n))
    assert FizzBuzz.compute() == expected
| 65.733333 | 117 | 0.403651 | 115 | 986 | 3.434783 | 0.556522 | 0.182278 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142454 | 0.280933 | 986 | 14 | 118 | 70.428571 | 0.414669 | 0 | 0 | 0 | 0 | 0 | 0.317444 | 0 | 0 | 0 | 0 | 0 | 0.090909 | 1 | 0.090909 | false | 0 | 0.090909 | 0 | 0.181818 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc45c79609b6cc7f52c4f517f7f951560289401d | 2,232 | py | Python | src/environment_provider/logs/log_area.py | andjoe-axis/etos-environment-provider | e95f3e67a22ee08382265ea55070f9cd98b544e4 | [
"Apache-2.0"
] | null | null | null | src/environment_provider/logs/log_area.py | andjoe-axis/etos-environment-provider | e95f3e67a22ee08382265ea55070f9cd98b544e4 | [
"Apache-2.0"
] | 15 | 2020-09-28T12:01:46.000Z | 2022-03-11T11:38:04.000Z | src/environment_provider/logs/log_area.py | t-persson/etos-environment-provider | bf01dc2fe8f989bfc5c97dd543a15a15f0383540 | [
"Apache-2.0"
] | 4 | 2020-09-25T11:16:20.000Z | 2021-02-03T12:21:36.000Z | # Copyright 2020 Axis Communications AB.
#
# For a full list of individual contributors, please see the commit history.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Log area provider data module."""
class LogArea:
    """Log area data object.

    Mirrors every attribute assignment into a backing dict so the object can
    be round-tripped as a dictionary (see `as_dict`).
    """

    # Class-level default: stays None until __init__ binds the instance dict,
    # so __setattr__ skips dict mirroring during that first assignment.
    _log_area_dictionary = None

    def __init__(self, **log_area):
        """Take a dictionary as input and setattr on instance.

        :param log_area: Dictionary to set attributes from.
        :type log_area: dict
        """
        self._log_area_dictionary = log_area
        for key, value in log_area.items():
            setattr(self, key, value)

    def __setattr__(self, name, value):
        """Set log area parameters to dict and object.

        :param name: Name of parameter to set.
        :type name: str
        :param value: Value of parameter.
        :type value: any
        """
        # Mirror into the backing dict once it exists. There is no name
        # filter here, so later rebindings of _log_area_dictionary itself
        # would also be recorded as a key in the (old) dict.
        if self._log_area_dictionary is not None:
            self._log_area_dictionary[name] = value
        super().__setattr__(name, value)

    def update(self, **dictionary):
        """Update log area dictionary with new data.

        :param dictionary: Dictionary to update attributes from.
        :type dictionary: dict
        """
        self._log_area_dictionary.update(**dictionary)
        for key, value in dictionary.items():
            setattr(self, key, value)

    @property
    def as_dict(self):
        """Represent log area as dictionary.

        :return: Log area dictionary.
        :rtype: dict
        """
        return self._log_area_dictionary

    def __repr__(self):
        """Represent log area as string.

        :return: Log area dictionary as string.
        :rtype: str
        """
        return repr(self._log_area_dictionary)
| 31 | 76 | 0.651434 | 290 | 2,232 | 4.865517 | 0.382759 | 0.09922 | 0.13253 | 0.089298 | 0.100638 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004869 | 0.263889 | 2,232 | 71 | 77 | 31.43662 | 0.853926 | 0.560036 | 0 | 0.105263 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.263158 | false | 0 | 0 | 0 | 0.473684 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc480975390bdb48e4e84b9194466af0a9cb6296 | 544 | py | Python | wazimap_ng/datasets/migrations/0090_auto_20200507_0306.py | arghyaiitb/wazimap-ng | 2a77860526d865b8fd0c22a2204f121fdb3b28a0 | [
"Apache-2.0"
] | 11 | 2019-12-31T20:27:22.000Z | 2022-03-10T03:55:38.000Z | wazimap_ng/datasets/migrations/0090_auto_20200507_0306.py | arghyaiitb/wazimap-ng | 2a77860526d865b8fd0c22a2204f121fdb3b28a0 | [
"Apache-2.0"
] | 164 | 2020-02-06T15:02:22.000Z | 2022-03-30T22:42:00.000Z | wazimap_ng/datasets/migrations/0090_auto_20200507_0306.py | arghyaiitb/wazimap-ng | 2a77860526d865b8fd0c22a2204f121fdb3b28a0 | [
"Apache-2.0"
] | 16 | 2020-01-03T20:30:24.000Z | 2022-01-11T11:05:15.000Z |
# Generated by Django 2.2.10 on 2020-05-07 03:06
import django.core.validators
from django.db import migrations, models
import wazimap_ng.datasets.models.upload
class Migration(migrations.Migration):
    """Auto-generated: alter Dataset.permission_type to a CharField with
    'private'/'public' choices, defaulting to 'public' (max_length=32)."""

    dependencies = [
        ('datasets', '0089_auto_20200509_2036'),
    ]

    operations = [
        migrations.AlterField(
            model_name='dataset',
            name='permission_type',
            field=models.CharField(choices=[('private', 'Private'), ('public', 'Public')], default='public', max_length=32),
        ),
    ]
| 24.727273 | 124 | 0.645221 | 60 | 544 | 5.733333 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.080378 | 0.222426 | 544 | 21 | 125 | 25.904762 | 0.732861 | 0.084559 | 0 | 0 | 1 | 0 | 0.171717 | 0.046465 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.214286 | 0 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc4906edea15ca13a154d701ab1df543b2b7b9bf | 406 | py | Python | ward_mapping/migrations/0002_auto_20190516_1329.py | Suraj1127/ward-mapping-application | 53fa39bab875ca47fdab814fd28ea0b7d2086c15 | [
"MIT"
] | 1 | 2019-05-16T04:08:40.000Z | 2019-05-16T04:08:40.000Z | ward_mapping/migrations/0002_auto_20190516_1329.py | Suraj1127/ward-mapping-application | 53fa39bab875ca47fdab814fd28ea0b7d2086c15 | [
"MIT"
] | null | null | null | ward_mapping/migrations/0002_auto_20190516_1329.py | Suraj1127/ward-mapping-application | 53fa39bab875ca47fdab814fd28ea0b7d2086c15 | [
"MIT"
] | null | null | null | # Generated by Django 2.2 on 2019-05-16 07:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make Map2011.old_survey_ward_code a unique CharField
    (max_length=30)."""

    dependencies = [
        ('ward_mapping', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='map2011',
            name='old_survey_ward_code',
            field=models.CharField(max_length=30, unique=True),
        ),
    ]
| 21.368421 | 63 | 0.6133 | 45 | 406 | 5.377778 | 0.822222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.081633 | 0.275862 | 406 | 18 | 64 | 22.555556 | 0.741497 | 0.105911 | 0 | 0 | 1 | 0 | 0.141274 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc4f865ffb4e361cabf92cb0fc88fc033229ffc2 | 7,287 | py | Python | file_repository_sdk/api/archive/get_difference_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | 5 | 2019-07-31T04:11:05.000Z | 2021-01-07T03:23:20.000Z | file_repository_sdk/api/archive/get_difference_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | null | null | null | file_repository_sdk/api/archive/get_difference_pb2.py | easyopsapis/easyops-api-python | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: get_difference.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from file_repository_sdk.model.file_repository import diff_pb2 as file__repository__sdk_dot_model_dot_file__repository_dot_diff__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='get_difference.proto',
package='archive',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x14get_difference.proto\x12\x07\x61rchive\x1a\x34\x66ile_repository_sdk/model/file_repository/diff.proto\"~\n\x14GetDifferenceRequest\x12\x10\n\x08ver_from\x18\x01 \x01(\t\x12\x0e\n\x06ver_to\x18\x02 \x01(\t\x12\x11\n\tdiff_file\x18\x03 \x01(\t\x12\x0c\n\x04path\x18\x04 \x01(\t\x12\x10\n\x08\x65ncoding\x18\x05 \x01(\t\x12\x11\n\tpackageId\x18\x06 \x01(\t\"u\n\x1cGetDifferenceResponseWrapper\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x13\n\x0b\x63odeExplain\x18\x02 \x01(\t\x12\r\n\x05\x65rror\x18\x03 \x01(\t\x12#\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32\x15.file_repository.Diffb\x06proto3')
,
dependencies=[file__repository__sdk_dot_model_dot_file__repository_dot_diff__pb2.DESCRIPTOR,])
# Descriptor for archive.GetDifferenceRequest: six string fields identifying
# the two versions to diff plus file/path/encoding/package selectors.
_GETDIFFERENCEREQUEST = _descriptor.Descriptor(
  name='GetDifferenceRequest',
  full_name='archive.GetDifferenceRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='ver_from', full_name='archive.GetDifferenceRequest.ver_from', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ver_to', full_name='archive.GetDifferenceRequest.ver_to', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='diff_file', full_name='archive.GetDifferenceRequest.diff_file', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='path', full_name='archive.GetDifferenceRequest.path', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='encoding', full_name='archive.GetDifferenceRequest.encoding', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='packageId', full_name='archive.GetDifferenceRequest.packageId', index=5,
      number=6, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=87,
  serialized_end=213,
)
# Descriptor for archive.GetDifferenceResponseWrapper: standard EasyOps
# envelope (code/codeExplain/error) plus a file_repository.Diff payload.
_GETDIFFERENCERESPONSEWRAPPER = _descriptor.Descriptor(
  name='GetDifferenceResponseWrapper',
  full_name='archive.GetDifferenceResponseWrapper',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='code', full_name='archive.GetDifferenceResponseWrapper.code', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='codeExplain', full_name='archive.GetDifferenceResponseWrapper.codeExplain', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='error', full_name='archive.GetDifferenceResponseWrapper.error', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='data', full_name='archive.GetDifferenceResponseWrapper.data', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=215,
  serialized_end=332,
)
# Wire the cross-file message reference and register both message types with
# the default symbol database so reflection can build the concrete classes.
_GETDIFFERENCERESPONSEWRAPPER.fields_by_name['data'].message_type = file__repository__sdk_dot_model_dot_file__repository_dot_diff__pb2._DIFF
DESCRIPTOR.message_types_by_name['GetDifferenceRequest'] = _GETDIFFERENCEREQUEST
DESCRIPTOR.message_types_by_name['GetDifferenceResponseWrapper'] = _GETDIFFERENCERESPONSEWRAPPER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

GetDifferenceRequest = _reflection.GeneratedProtocolMessageType('GetDifferenceRequest', (_message.Message,), {
  'DESCRIPTOR' : _GETDIFFERENCEREQUEST,
  '__module__' : 'get_difference_pb2'
  # @@protoc_insertion_point(class_scope:archive.GetDifferenceRequest)
  })
_sym_db.RegisterMessage(GetDifferenceRequest)

GetDifferenceResponseWrapper = _reflection.GeneratedProtocolMessageType('GetDifferenceResponseWrapper', (_message.Message,), {
  'DESCRIPTOR' : _GETDIFFERENCERESPONSEWRAPPER,
  '__module__' : 'get_difference_pb2'
  # @@protoc_insertion_point(class_scope:archive.GetDifferenceResponseWrapper)
  })
_sym_db.RegisterMessage(GetDifferenceResponseWrapper)


# @@protoc_insertion_point(module_scope)
| 43.118343 | 616 | 0.758062 | 907 | 7,287 | 5.786108 | 0.175303 | 0.04878 | 0.05202 | 0.030488 | 0.572218 | 0.510671 | 0.510671 | 0.510671 | 0.496189 | 0.496189 | 0 | 0.035998 | 0.119391 | 7,287 | 168 | 617 | 43.375 | 0.78183 | 0.044188 | 0 | 0.625 | 1 | 0.006944 | 0.205692 | 0.161276 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.041667 | 0 | 0.041667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc51e6f93cd0950e7541b5984e91a6165fd05bb5 | 11,977 | py | Python | PyNotion.py | stephanGarland/PyNotion | 74460e4792758c740b4e779772f734f97d7ad371 | [
"MIT"
] | 9 | 2017-11-29T04:01:22.000Z | 2022-02-06T09:19:24.000Z | PyNotion.py | stephanGarland/PyNotion | 74460e4792758c740b4e779772f734f97d7ad371 | [
"MIT"
] | 3 | 2021-09-01T20:51:32.000Z | 2021-09-03T16:30:48.000Z | PyNotion.py | stephanGarland/PyNotion | 74460e4792758c740b4e779772f734f97d7ad371 | [
"MIT"
] | 1 | 2021-09-02T19:28:44.000Z | 2021-09-02T19:28:44.000Z | import requests
import json
from string import Template
from models.user import User
from models.task import Task
from models.system import System
from models.threshold import Threshold
from models.user_preferences import UserPreferences
from models.alert_perferences import AlertPerferences
from models.base_station import BaseStation
from models.devices import Device
from models.event import Event
from models.location import Location
from models.sensor import Sensor
from models.task_data import TaskData
# NOTE: BASE_URL already ends with '/', so path fragments appended to it must
# not start with one (the original built most URLs as "{}/users/..." and
# produced ".../api//users/...").
BASE_URL = "https://api.getnotion.com/api/"
HEADERS = {'content-type': 'application/json'}
AUTH_HEADER = '{"Authorization": "Token token=${token}"}'


class PyNotion:
    """Minimal client for the Notion (getnotion.com) home-monitoring REST API.

    Set ``auth_token`` after signing in with :meth:`get_token`; every
    authenticated call reads it via :meth:`_get_auth_header`.
    """

    def __init__(self):
        # Keep the requests module on the instance so tests can inject a stub.
        self.r = requests
        self.auth_token = None

    # ------------------------------------------------------------------
    # internal helpers
    # ------------------------------------------------------------------
    def _get_auth_header(self):
        """Build the Authorization header dict from ``self.auth_token``.

        NOTE(review): exiting the whole process from a library method is
        questionable, but it is this client's established behaviour, so it
        is preserved.
        """
        t = Template(AUTH_HEADER)
        if self.auth_token is None:
            print("Please set PyNotion.token\np = PyNotion()\np.token = '<<token here>>'")
            exit(1)
        return json.loads(t.substitute(token=self.auth_token))

    def _get_json(self, url):
        """GET *url* with the auth header and return the decoded JSON body."""
        return self.r.get(url, headers=self._get_auth_header()).json()

    def _delete_ok(self, url):
        """DELETE *url* with the auth header; True iff the server replied 204."""
        results = self.r.delete(url, headers=self._get_auth_header())
        return results.status_code == 204

    # ------------------------------------------------------------------
    # sessions
    # ------------------------------------------------------------------
    def get_token(self, user_name, password):
        """Open a new session.

        :param user_name: account email address
        :param password: account password
        :return: a :class:`User` on success, otherwise the raw error JSON
        """
        url = "{0}users/sign_in".format(BASE_URL)
        t = Template('{"sessions": {"email": "${email}", "password": "${password}"}}')
        data = t.substitute(email=user_name, password=password)
        results = self.r.post(url, data=data, headers=HEADERS)
        if results.status_code == 200:
            return User(**results.json()['users'])
        return results.json()

    def kill_token(self):
        """Close the current session.

        :return: True when the session was destroyed (HTTP 204)
        """
        return self._delete_ok("{0}users/sign_out".format(BASE_URL))

    # ------------------------------------------------------------------
    # users
    # ------------------------------------------------------------------
    def get_my_info(self, user_id):
        """:return: :class:`User` details for *user_id*."""
        results = self._get_json("{}users/{}".format(BASE_URL, user_id))
        return User(**results['users'])

    def delete_user(self, user_id):
        """Delete the account *user_id*.

        Bug fix: the original issued a GET here, so the account was never
        actually deleted; it now sends DELETE as intended.

        :return: True when the server replied 204
        """
        return self._delete_ok("{}users/{}".format(BASE_URL, user_id))

    # ------------------------------------------------------------------
    # sensors / tasks
    # ------------------------------------------------------------------
    def get_sensors(self):
        """:return: list of :class:`Sensor`."""
        results = self._get_json("{}sensors/".format(BASE_URL))
        return [Sensor(**sensor) for sensor in results['sensors']]

    def get_tasks(self):
        """:return: list of :class:`Task`."""
        results = self._get_json("{}tasks/".format(BASE_URL))
        return [Task(**task) for task in results['tasks']]

    def get_task(self, task_id):
        """:return: :class:`Task` details for *task_id*."""
        results = self._get_json("{}tasks/{}/".format(BASE_URL, task_id))
        return Task(**results['tasks'])

    def delete_task(self, task_id):
        """:return: True when *task_id* was deleted (HTTP 204)."""
        return self._delete_ok("{}tasks/{}/".format(BASE_URL, task_id))

    def get_task_data(self, task_id, data_before, data_after):
        """Fetch recorded data for one task.

        :param data_before: ISO timestamp, e.g. ``2017-01-01T12:00:00.000Z``
        :param data_after: ISO timestamp, same format
        :return: :class:`TaskData`, or None when the response lacks a
            ``task`` key (typically a bad date format)
        """
        url = "{}tasks/{}/data/?data_before={}&data_after={}".format(
            BASE_URL, task_id, data_before, data_after)
        results = self.r.get(url, headers=self._get_auth_header())
        try:
            return TaskData(**results.json()['task'])
        except KeyError:
            print("Unable to retrieve task data Date format should be '2017-01-01T12:00:00.000Z'.")

    # ------------------------------------------------------------------
    # systems
    # ------------------------------------------------------------------
    def get_systems(self):
        """:return: list of :class:`System`."""
        results = self._get_json("{}systems/".format(BASE_URL))
        return [System(**system) for system in results['systems']]

    def get_system(self, system_id):
        """:return: :class:`System` details for *system_id*."""
        results = self._get_json("{}systems/{}/".format(BASE_URL, system_id))
        return System(**results['systems'])

    def delete_system(self, system_id):
        """:return: True when *system_id* was deleted (HTTP 204)."""
        return self._delete_ok("{}systems/{}/".format(BASE_URL, system_id))

    # ------------------------------------------------------------------
    # thresholds
    # ------------------------------------------------------------------
    def get_thresholds(self):
        """Not implemented yet."""
        # ToDo - Create/Update Threshold Class
        raise NotImplementedError

    def get_threshold(self, threshold_id):
        """Not implemented yet."""
        # ToDo - Create/Update Threshold Class
        raise NotImplementedError

    def delete_threshold(self, threshold_id):
        """:return: True when *threshold_id* was deleted (HTTP 204)."""
        return self._delete_ok("{}thresholds/{}/".format(BASE_URL, threshold_id))

    # ------------------------------------------------------------------
    # preferences
    # ------------------------------------------------------------------
    def get_user_preferences(self, user_id):
        """:return: :class:`UserPreferences` for *user_id*."""
        results = self._get_json("{}users/{}/user_preferences/".format(BASE_URL, user_id))
        return UserPreferences(**results['user_preferences'])

    def get_alert_preferences(self, task_id):
        """:return: list of :class:`AlertPerferences` for *task_id*."""
        results = self._get_json("{}tasks/{}/alert_preferences".format(BASE_URL, task_id))
        return [AlertPerferences(**alert) for alert in results['alert_preferences']]

    def get_alert_preference(self, task_id, preference_id):
        """:return: one :class:`AlertPerferences` of *task_id*."""
        results = self._get_json(
            "{}tasks/{}/alert_preferences/{}/".format(BASE_URL, task_id, preference_id))
        return AlertPerferences(**results['alert_preferences'])

    def delete_alert_preference(self, task_id, preference_id):
        """:return: True when the preference was deleted (HTTP 204)."""
        return self._delete_ok(
            "{}tasks/{}/alert_preferences/{}/".format(BASE_URL, task_id, preference_id))

    # ------------------------------------------------------------------
    # bridges (base stations)
    # ------------------------------------------------------------------
    def get_bridges(self):
        """:return: list of :class:`BaseStation`."""
        results = self._get_json("{}base_stations/".format(BASE_URL))
        return [BaseStation(**base_station) for base_station in results['base_stations']]

    def get_bridge(self, bridge_id):
        """:return: :class:`BaseStation` details, or an error string."""
        if bridge_id is None:
            return "Bridge ID is a required parameter"
        results = self._get_json("{}base_stations/{}/".format(BASE_URL, bridge_id))
        return BaseStation(**results['base_stations'])

    def delete_bridge(self, bridge_id):
        """:return: True when deleted (HTTP 204), or an error string."""
        if bridge_id is None:
            return "Bridge ID is a required parameter"
        return self._delete_ok("{}base_stations/{}/".format(BASE_URL, bridge_id))

    # ------------------------------------------------------------------
    # devices
    # ------------------------------------------------------------------
    def get_devices(self):
        """:return: list of :class:`Device`."""
        results = self._get_json("{}devices/".format(BASE_URL))
        return [Device(**device) for device in results['devices']]

    def get_device(self, device_id):
        """:return: :class:`Device` details, or an error string."""
        if device_id is None:
            return "Device Id is a required parameter"
        results = self._get_json("{}devices/{}".format(BASE_URL, device_id))
        return Device(**results['devices'])

    def delete_device(self, device_id):
        """:return: True when deleted (HTTP 204), or an error string."""
        if device_id is None:
            return "Device Id is a required parameter"
        return self._delete_ok("{}devices/{}".format(BASE_URL, device_id))

    # ------------------------------------------------------------------
    # events
    # ------------------------------------------------------------------
    def get_events(self):
        """Not implemented yet."""
        # ToDo - Create Event Object
        raise NotImplementedError

    def get_event(self, event_id):
        """Not implemented yet."""
        # ToDo - Create Event Object
        raise NotImplementedError

    # ------------------------------------------------------------------
    # locations
    # ------------------------------------------------------------------
    def get_locations(self, system_id):
        """:return: list of :class:`Location` for *system_id*."""
        results = self._get_json("{}systems/{}/locations/".format(BASE_URL, system_id))
        return [Location(**location) for location in results['locations']]

    def get_location(self, location_id, system_id):
        """:return: :class:`Location` details."""
        results = self._get_json(
            "{}systems/{}/locations/{}/".format(BASE_URL, system_id, location_id))
        return Location(**results['locations'])

    def delete_location(self, location_id, system_id):
        """:return: True when the location was deleted (HTTP 204)."""
        return self._delete_ok(
            "{}systems/{}/locations/{}/".format(BASE_URL, system_id, location_id))
| 34.22 | 113 | 0.595057 | 1,401 | 11,977 | 4.880086 | 0.09636 | 0.046804 | 0.057043 | 0.072108 | 0.541904 | 0.517771 | 0.483107 | 0.454147 | 0.41802 | 0.414509 | 0 | 0.009619 | 0.270852 | 11,977 | 349 | 114 | 34.318052 | 0.773274 | 0.110796 | 0 | 0.324873 | 0 | 0.005076 | 0.123632 | 0.028121 | 0 | 0 | 0 | 0.011461 | 0 | 1 | 0.162437 | false | 0.015228 | 0.076142 | 0 | 0.431472 | 0.010152 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc53e51bc27a9706562d6ebf12d19cf726ff3572 | 2,496 | py | Python | src/extractor/csxextract/extractors/grobid.py | amm-kun/pdfmef | 712934533276484d2c76cfd1fecdf0f7062cc0bf | [
"Apache-2.0"
] | 25 | 2016-05-26T08:34:09.000Z | 2021-05-07T17:45:46.000Z | src/extractor/csxextract/extractors/grobid.py | SeerLabs/PDFMEF | 712934533276484d2c76cfd1fecdf0f7062cc0bf | [
"Apache-2.0"
] | 3 | 2016-05-17T20:52:51.000Z | 2016-05-19T13:37:19.000Z | src/extractor/csxextract/extractors/grobid.py | amm-kun/pdfmef | 712934533276484d2c76cfd1fecdf0f7062cc0bf | [
"Apache-2.0"
] | 7 | 2017-01-22T15:50:52.000Z | 2021-06-11T16:12:45.000Z | from extraction.runnables import Extractor, RunnableError, ExtractorResult
import extractor.csxextract.interfaces as interfaces
import extractor.csxextract.config as config
import extractor.csxextract.filters as filters
import defusedxml.ElementTree as safeET
import xml.etree.ElementTree as ET
import xml.sax.saxutils as xmlutils
import extraction.utils
import tempfile
import requests
import re
import os
# Returns full TEI xml document of the PDF
class GrobidTEIExtractor(interfaces.FullTextTEIExtractor):
dependencies = frozenset([filters.AcademicPaperFilter])
result_file_name = '.tei'
def extract(self, data, dep_results):
xml = _call_grobid_method(data, 'processFulltextDocument')
return ExtractorResult(xml_result=xml)
# Returns TEI xml document only of the PDF's header info
class GrobidHeaderTEIExtractor(interfaces.HeaderTEIExtractor):
dependencies = frozenset([filters.AcademicPaperFilter])
result_file_name = '.header.tei'
def extract(self, data, dep_results):
xml = _call_grobid_method(data, 'processHeaderDocument')
return ExtractorResult(xml_result=xml)
class GrobidCitationTEIExtractor(Extractor):
dependencies = frozenset([filters.AcademicPaperFilter])
result_file_name = '.cite.tei'
def extract(self, data, dep_results):
xml = _call_grobid_method(data, 'processReferences')
return ExtractorResult(xml_result=xml)
def _call_grobid_method(data, method):
url = '{0}/api/{1}'.format(config.GROBID_HOST, method)
# Write the pdf data to a temporary location so Grobid can process it
path = extraction.utils.temp_file(data, suffix='.pdf')
files = {'input': (path, open(path, 'rb')),}
try:
resp = requests.post(url, files=files)
except requests.exceptions.RequestException as ex:
raise RunnableError('Request to Grobid server failed')
finally:
os.remove(path)
if resp.status_code != 200:
raise RunnableError('Grobid returned status {0} instead of 200\nPossible Error:\n{1}'.format(resp.status_code, resp.text))
# remove all namespace info from xml string
# this is hacky but makes parsing it much much easier down the road
#remove_xmlns = re.compile(r'\sxmlns[^"]+"[^"]+"')
#xml_text = remove_xmlns.sub('', resp.content)
#xml = safeET.fromstring(xml_text)
xmlstring = re.sub(' xmlns="[^"]+"', '', resp.content, count=1)
xml = safeET.fromstring(xmlstring)
return xml
| 35.657143 | 131 | 0.72516 | 302 | 2,496 | 5.887417 | 0.430464 | 0.033746 | 0.035996 | 0.044994 | 0.249719 | 0.194038 | 0.194038 | 0.091114 | 0.091114 | 0.091114 | 0 | 0.005348 | 0.175881 | 2,496 | 69 | 132 | 36.173913 | 0.859018 | 0.159455 | 0 | 0.2 | 0 | 0 | 0.102969 | 0.021073 | 0 | 0 | 0 | 0 | 0 | 1 | 0.088889 | false | 0 | 0.266667 | 0 | 0.644444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
cc5412276de8c611ee66cd660e808e297d27cffc | 4,059 | py | Python | link_example/boyd_pipe.py | stoiver/anuga_drainage | d5ab52efc0bfd8a163ac45fdc5da305cdf469a94 | [
"MIT"
] | 1 | 2020-11-13T13:49:22.000Z | 2020-11-13T13:49:22.000Z | link_example/boyd_pipe.py | stoiver/anuga_drainage | d5ab52efc0bfd8a163ac45fdc5da305cdf469a94 | [
"MIT"
] | null | null | null | link_example/boyd_pipe.py | stoiver/anuga_drainage | d5ab52efc0bfd8a163ac45fdc5da305cdf469a94 | [
"MIT"
] | 1 | 2021-05-18T11:45:15.000Z | 2021-05-18T11:45:15.000Z | #------------------------------------------------------------------------------
# IMPORT NECESSARY MODULES
#------------------------------------------------------------------------------
print (' ABOUT to Start Simulation:- Importing Modules')
import anuga, anuga.parallel, numpy, time, os, glob
from anuga.operators.rate_operators import Polygonal_rate_operator
from anuga import file_function, Polygon_function, read_polygon, create_mesh_from_regions, Domain, Inlet_operator
import anuga.utilities.spatialInputUtil as su
from anuga import distribute, myid, numprocs, finalize, barrier
from anuga.parallel.parallel_operator_factory import Inlet_operator, Boyd_box_operator, Boyd_pipe_operator
from anuga import Rate_operator
#------------------------------------------------------------------------------
# FILENAMES, MODEL DOMAIN and VARIABLES
#------------------------------------------------------------------------------
# File name stems and model extent.
basename = 'terrain'      # elevation data is read from terrain.csv
outname = 'boyd_pipe'     # name used for the simulation output
meshname = 'terrain.msh'  # generated triangular mesh file

# Rectangular model extent in projected map coordinates (metres):
# W/E are x (easting) limits, S/N are y (northing) limits.
W = 296600.
N = 6180070.
E = 296730.
S = 6179960.
#------------------------------------------------------------------------------
# CREATING MESH
#------------------------------------------------------------------------------
# Rectangular bounding polygon; each edge gets a boundary tag (index refers to
# the polygon segment starting at the vertex with that index).
bounding_polygon = [[W, S], [E, S], [E, N], [W, N]]
create_mesh_from_regions(bounding_polygon,
                         boundary_tags={'south': [0], 'east': [1], 'north': [2], 'west': [3]},
                         maximum_triangle_area=1.0,
                         filename=meshname,
                         use_cache=False,
                         verbose=True)
#------------------------------------------------------------------------------
# SETUP COMPUTATIONAL DOMAIN
#------------------------------------------------------------------------------
domain = Domain(meshname, use_cache=False, verbose=True)
# Water shallower than 0.1 mm is not written to the output file.
domain.set_minimum_storable_height(0.0001)
domain.set_name(outname)
print (domain.statistics())

#------------------------------------------------------------------------------
# APPLY MANNING'S ROUGHNESSES
#------------------------------------------------------------------------------
# Uniform Manning's n; elevation is interpolated from the terrain CSV points
# (alpha is the smoothing parameter of the fit).
domain.set_quantity('friction', 0.035)
domain.set_quantity('elevation', filename=basename+'.csv', use_cache=False, verbose=True, alpha=0.1)
#------------------------------------------------------------------------------
# BOYD PIPE CULVERT
#------------------------------------------------------------------------------
# Energy-loss coefficients for the culvert hydraulics.
losses = {'inlet':0.5, 'outlet':1.0, 'bend':0.0, 'grate':0.0, 'pier': 0.0, 'other': 0.0}
# Culvert end points in map coordinates (ep0 -> ep1) and invert levels (m AHD?
# units follow the elevation data — confirm).
ep0 = numpy.array([296660.390,6180017.186])
ep1 = numpy.array([296649.976,6180038.872])
invert_elevations = [12.40, 12.20]
# 1 m diameter circular (Boyd pipe) culvert between ep0 and ep1.
culvert = Boyd_pipe_operator(domain,
                             losses=losses,
                             diameter=1.0,
                             end_points=[ep0, ep1],
                             invert_elevations=invert_elevations,
                             use_momentum_jet=False,
                             use_velocity_head=False,
                             manning=0.013,
                             logging=True,
                             label='boyd_pipe',
                             verbose=False)
#------------------------------------------------------------------------------
# APPLY FLOW
#------------------------------------------------------------------------------
# Constant inflow of 1 m^3/s applied along this line segment.
line = [[296669.258,6179974.191],[296677.321,6179976.449]]
anuga.parallel.Inlet_operator(domain, line, 1.0)

#------------------------------------------------------------------------------
# SETUP BOUNDARY CONDITIONS
#------------------------------------------------------------------------------
print ('Available boundary tags', domain.get_boundary_tags())
# Reflective wall on the south edge; fixed zero stage/momentum elsewhere.
Br = anuga.Reflective_boundary(domain)
Bd = anuga.Dirichlet_boundary([0,0,0])
domain.set_boundary({'west': Bd, 'south': Br, 'north': Bd, 'east': Bd})
#------------------------------------------------------------------------------
# EVOLVE SYSTEM THROUGH TIME
#------------------------------------------------------------------------------
# Evolve the shallow-water solution to t = 4000 s of model time, reporting
# once per second of model time.  (Cleanup: removed a redundant re-import of
# `time` — it is already imported at the top of the script — and the unused
# timer variable `t0`, which was set but never read.)
for t in domain.evolve(yieldstep=1, finaltime=4000):
    print(domain.timestepping_statistics())
    print(domain.boundary_statistics(quantities='stage'))
print('Finished')
| 36.241071 | 114 | 0.452328 | 349 | 4,059 | 5.103152 | 0.467049 | 0.006738 | 0.025267 | 0.033689 | 0.04941 | 0.035935 | 0 | 0 | 0 | 0 | 0 | 0.044556 | 0.115299 | 4,059 | 111 | 115 | 36.567568 | 0.451406 | 0.39862 | 0 | 0 | 0 | 0 | 0.08858 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.163636 | 0 | 0.163636 | 0.109091 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc56be5595a6711ad636b9c2de2c2e07499ae61a | 1,977 | py | Python | script/jira-release.py | nmesar/jira-release | d389bdebc49dd7d54e5dcc78f88ef2d863d233fb | [
"MIT"
] | 3 | 2021-04-06T05:08:53.000Z | 2022-03-11T15:38:32.000Z | script/jira-release.py | nmesar/jira-release | d389bdebc49dd7d54e5dcc78f88ef2d863d233fb | [
"MIT"
] | 1 | 2021-05-26T13:37:16.000Z | 2021-05-26T14:05:23.000Z | script/jira-release.py | nmesar/jira-release | d389bdebc49dd7d54e5dcc78f88ef2d863d233fb | [
"MIT"
] | 1 | 2021-04-27T14:01:58.000Z | 2021-04-27T14:01:58.000Z | #!/usr/bin/env python
import os
import requests
from datetime import datetime
jira_version_name = os.getenv('JIRA_VERSION_NAME')
jira_project = os.getenv('JIRA_PROJ')
auth_user = os.getenv('JIRA_AUTH_USER')
auth_password = os.getenv('JIRA_AUTH_PASSWORD')
if jira_version_name is None:
print("Version Name Variable [JIRA_VERSION_NAME] is not defined.")
exit(2)
if jira_project is None:
print("Jira Project Environment Variable [JIRA_PROJ] is not defined.")
exit(2)
if auth_user is None:
print("Authentication User Environment Variable [JIRA_AUTH_USER] is not defined.")
exit(2)
if auth_password is None:
print("Authentication Password Environment Variable [JIRA_AUTH_PASSWORD] is not defined.")
exit(2)
jira_version_date = datetime.today().strftime('%d/%b/%Y')
jira_url = os.getenv('JIRA_URL', 'https://jira.org')
jira_version_release_env = os.getenv('JIRA_VERSION_RELEASED', 'true')
jira_version_release = True
if jira_version_release_env == 'false':
jira_version_release = False
jira_version_description =os.getenv('JIRA_VERSION_DESCRIPTION', 'Version {}'.format(jira_version_name))
print('Will Attempt to create version [{}] for project [{}] '.format(jira_version_name, jira_project))
jira_api_version = os.getenv('JIRA_API_VERSION', '2')
if jira_api_version == '2':
data = {
'description': jira_version_description,
'name': jira_version_name,
'userReleaseDate': jira_version_date,
'project': jira_project,
'released': jira_version_release
}
# Construct URL
api_url = ('%(url)s/rest/api/2/version' % {'url': jira_url})
print('Sending request to:')
print(api_url)
print('with body')
print(data)
# Post build status to Bitbucket
response = requests.post(api_url, auth=(auth_user, auth_password), json=data)
print('Response:')
print(response)
print(response.text)
if response:
exit(0)
else:
exit(1)
| 27.458333 | 103 | 0.704603 | 270 | 1,977 | 4.9 | 0.255556 | 0.14966 | 0.072562 | 0.048375 | 0.101285 | 0.049131 | 0.034769 | 0 | 0 | 0 | 0 | 0.005518 | 0.175013 | 1,977 | 71 | 104 | 27.84507 | 0.805641 | 0.032878 | 0 | 0.081633 | 0 | 0 | 0.318491 | 0.037192 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.081633 | 0.061224 | 0 | 0.061224 | 0.244898 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
cc5bee5e35931196084b4b5d85f90367fb351762 | 320 | py | Python | pertemuan_5/3_Table_Record_Search_with_Session/run.py | Muhammad-Yunus/Flask-Web-Development | c13e3fda30151b1155242abe4532d5f4d7bc120e | [
"Apache-2.0"
] | null | null | null | pertemuan_5/3_Table_Record_Search_with_Session/run.py | Muhammad-Yunus/Flask-Web-Development | c13e3fda30151b1155242abe4532d5f4d7bc120e | [
"Apache-2.0"
] | null | null | null | pertemuan_5/3_Table_Record_Search_with_Session/run.py | Muhammad-Yunus/Flask-Web-Development | c13e3fda30151b1155242abe4532d5f4d7bc120e | [
"Apache-2.0"
] | null | null | null | from app import app
from app import db
import os
if __name__ == '__main__':
# if database file not exist, create it.
database_path = app.config['DATABASE_FILE']
if not os.path.exists(database_path):
with app.app_context():
db.create_all()
# invoke app to run
app.run()
| 20 | 47 | 0.63125 | 46 | 320 | 4.108696 | 0.5 | 0.074074 | 0.137566 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.275 | 320 | 16 | 48 | 20 | 0.814655 | 0.175 | 0 | 0 | 0 | 0 | 0.080153 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
cc5dfeb2c69a70c0544636a6f4098e91c5b3abfb | 2,403 | py | Python | app_video.py | theGASK/Cisco-Flask-API | d0baa3b1e9713c1df0bc004dc884cbfd0dba0fcf | [
"MIT"
] | null | null | null | app_video.py | theGASK/Cisco-Flask-API | d0baa3b1e9713c1df0bc004dc884cbfd0dba0fcf | [
"MIT"
] | null | null | null | app_video.py | theGASK/Cisco-Flask-API | d0baa3b1e9713c1df0bc004dc884cbfd0dba0fcf | [
"MIT"
] | null | null | null | from flask import Flask
from flask_restful import Api, Resource, abort, reqparse
from flask_sqlalchemy import SQLAlchemy
# NOTE(review): "REMOVE DEBUG" — presumably a reminder to disable debug mode
# (app.run(debug=True) at the bottom of this file) before deployment; confirm.
app = Flask(__name__)
api = Api(app)
# SQLite database file created alongside the application.
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///database.db'
db = SQLAlchemy(app)
class ConferenceModel(db.Model):
    """ORM row describing one video-conference session."""
    id = db.Column(db.Integer, primary_key=True)
    room = db.Column(db.String(100), nullable=False)           # room name
    spectators = db.Column(db.Integer, nullable=True)          # passive viewers
    participants = db.Column(db.Integer, nullable=False)       # active members

    def __repr__(self):
        # Bug fix: the original f-string referenced the bare names
        # room/spectators/participants (no self.), raising NameError whenever
        # a row was repr()'d.
        return (f'Session(room={self.room}, spectators={self.spectators}, '
                f'participants={self.participants})')
# Run once to create the SQLite schema, then keep commented out:
# db.create_all()

# Bug fix: this region contained a leftover merge-conflict marker (`=======`)
# with two diverging copies of the parser setup ('participants' in one,
# 'participant' in the other) — a SyntaxError as committed.  Resolved to a
# single copy; 'participants' is kept to match ConferenceModel.
video_put_args = reqparse.RequestParser()
video_put_args.add_argument('room', type=str, help='Name of the room is required', required=True)
video_put_args.add_argument('participants', type=str, help='Name of the participant is required', required=True)
video_put_args.add_argument('spectators', type=int, help='Number of spectators is required', required=True)

# In-memory registry of streams, keyed by integer video id.
videos = {}


def video_exists(video_id):
    """Abort the request with 409 Conflict if *video_id* is already registered."""
    if video_id in videos:
        abort(409, message='Video already exists')


def video_404(video_id):
    """Abort the request with 404 Not Found if *video_id* is unknown."""
    if video_id not in videos:
        abort(404, message='Video is not found')
class Video_Stream(Resource):
    """CRUD-style REST resource over the in-memory `videos` store."""

    def get(self, video_id):
        """Return the stored record, or 404 if unknown."""
        video_404(video_id)
        return videos[video_id]

    def put(self, video_id):
        """Register a new record; 409 if the id is already taken."""
        video_exists(video_id)
        parsed = video_put_args.parse_args()
        videos[video_id] = parsed
        return parsed, 201

    def delete(self, video_id):
        """Remove a record, or 404 if unknown."""
        video_404(video_id)
        videos.pop(video_id)
        return 'Video deleted', 204
# Route the Video_Stream resource under an integer-typed URL segment.
api.add_resource(Video_Stream, "/Video_Streaming/<int:video_id>")
if __name__ == "__main__":
app.run(debug=True) | 28.951807 | 112 | 0.712443 | 342 | 2,403 | 4.780702 | 0.236842 | 0.081346 | 0.066055 | 0.055046 | 0.557798 | 0.527217 | 0.527217 | 0.495413 | 0.495413 | 0.495413 | 0 | 0.016541 | 0.169788 | 2,403 | 83 | 113 | 28.951807 | 0.803008 | 0.107782 | 0 | 0.173913 | 0 | 0 | 0.210674 | 0.059457 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.065217 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc650bd505b06e0304df17ef1b59deb16466cdb3 | 12,061 | py | Python | misc-modules/sparktk-ext/sparktk_ext.py | Bhanuprakash-ch/jupyter | b48156a53146dd92ed9e83992dff48288e14ad14 | [
"Apache-2.0"
] | 5 | 2016-09-24T03:33:59.000Z | 2020-05-09T14:30:38.000Z | misc-modules/sparktk-ext/sparktk_ext.py | Kendralabs/jupyter | b48156a53146dd92ed9e83992dff48288e14ad14 | [
"Apache-2.0"
] | 2 | 2017-01-09T22:49:08.000Z | 2017-01-23T19:31:22.000Z | misc-modules/sparktk-ext/sparktk_ext.py | Kendralabs/jupyter | b48156a53146dd92ed9e83992dff48288e14ad14 | [
"Apache-2.0"
] | 10 | 2016-06-26T01:49:31.000Z | 2020-05-09T14:30:44.000Z | import errno
import json
import os
import subprocess
import time
from concurrent.futures import ProcessPoolExecutor as Pool
from notebook.base.handlers import IPythonHandler
# Per-app status journal: one JSON record appended per state change.
STATUS_FILE = 'STATUS.log'
# Per-app combined stdout/stderr log of the spark-submit run.
LOG_FILE = 'LOG.log'
# Lifecycle states an uploaded app moves through.
APP_STATUS = {
    'UPLOADED': 'uploaded',
    'SUBMITTED': 'submitted',
    'COMPLETED': 'completed'
}
# Tornado/handler settings; uploads land under UPLOADS_PATH.
APP_SETTINGS = {
    'TEMPLATE_PATH': r"templates",
    'STATIC_PATH': r"templates/static",
    'UPLOADS_PATH': r"uploads",
    "XSRF_COOKIES": False
}
# Keys used in the JSON status records written to STATUS_FILE.
RESPONSE = {
    "DRIVER_PATH": 'driver-path',  # submitted app id
    "APP_STATUS": 'app-status',  # UPLOADED, SUBMITTED, COMPLETED
    "APP_DIR": 'app-dir',  # Path where the app bits are uploaded
    "LAST_UPDATED": "last-updated"  # The time and date of last status update
}
def update_status(driver_path, app_status=APP_STATUS['COMPLETED']):
    """
    Append a JSON status record to the app's STATUS_FILE.

    :param driver_path: path to the main sparktk/pyspark script in the uploads directory
    :param app_status: final status of the app after this update written to STATUS_FILE
    :return: handler to the updated (closed) STATUS_FILE; callers use its ``.name``
    """
    app_dir = os.path.dirname(driver_path)
    # Fix: the original opened with mode 'a+w', which is rejected by Python 3
    # ('a' alone is all that is needed here), and never guaranteed the handle
    # was closed.  The with-block closes it even if serialisation fails.
    status_file = open(os.path.join(app_dir, STATUS_FILE), 'a')
    with status_file:
        status_file.write('\n')
        status_file.write(json.dumps({
            RESPONSE['DRIVER_PATH']: driver_path,
            RESPONSE['APP_STATUS']: app_status,
            RESPONSE['APP_DIR']: app_dir,
            RESPONSE['LAST_UPDATED']: time.strftime("%c"),
        }))
    return status_file
class IndexHandler(IPythonHandler):
    """
    implements the "hello" REST api endpoint.
    Examples:
    curl http://<JUPYTER_NOTEBOOK_URL>/hello
    """

    def get(self):
        greeting = "Hello From Jupyter"
        self.write(greeting)

    def write_error(self, status_code, **kwargs):
        # Keep the original (tongue-in-cheek) error body verbatim.
        message = "Gosh darnit, user! You caused a %d error." % status_code
        self.write(message)
class UploadFormHandler(IPythonHandler):
    """Serves the HTML upload form used to push files to the server."""

    def get(self):
        template = "templates/fileuploadform.html"
        self.render(template)
class UploadHandler(IPythonHandler):
    """
    implements the "upload" REST api endpoint.
    currently the only way to upload files to Jupyter is using the upload Form.
    after each attempt to upload, the file(s) are loaded into a directory format like "uploads/dddd" where d is a digit.
    Examples:
    curl http://<JUPYTER_NOTEBOOK_URL>/upload -F "filearg=@/home/ashahba/frame-basics.py"
    """
    def create_upload_dir(self):
        """
        :return: name of the directory created that contains uploaded script(s), jars
        """
        max_dir_len = len('0000')
        i = 0
        dir_name = APP_SETTINGS['UPLOADS_PATH'] + '/%s' % str(i).zfill(max_dir_len)
        # Probe for the first free numbered slot (0000..9999).
        while os.path.exists(dir_name) and (i < 10 ** max_dir_len):
            i += 1
            dir_name = APP_SETTINGS['UPLOADS_PATH'] + '/%s' % str(i).zfill(max_dir_len)
        if i >= 10 ** max_dir_len:
            raise Exception("Too many uploads have been done!\nPlease run some cleanup before uploading more files.\n")
        try:
            os.makedirs(dir_name)
        except OSError as exc:
            # Lost the race: another request created it first; that is fine.
            if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
                pass
            else:
                raise Exception("Directory creation failed.\n")
        return dir_name

    def post(self):
        app_dir = self.create_upload_dir()
        for filearg in self.request.files['filearg']:
            original_fname = filearg['filename']
            driver_path = app_dir + '/' + original_fname
            # Fix: the original leaked the output file handle (never closed).
            with open(driver_path, 'w') as output_file:
                output_file.write(filearg['body'])
        # The status record points at the last uploaded file, which is
        # treated as the driver script.
        status_file = update_status(driver_path, app_status=APP_STATUS['UPLOADED'])
        with open(status_file.name, 'rb') as f:
            self.write(f.readlines()[-1])
def mark_submitted(driver_path):
    """
    Appends a SUBMITTED entry to the app's STATUS_FILE when its job is queued.
    :param driver_path: the path to the main sparktk/pyspark script within the uploads folder
    :return: None
    """
    update_status(driver_path, app_status=APP_STATUS['SUBMITTED'])
def spark_submit(exec_string, log_file, driver_path):
    """
    asynchronously run the pyspark/sparktk submitted script while writing the logs to the log_file for the app
    :param exec_string: the command that is going to be run
    :param log_file: the file containing command(script) logs while running
    :param driver_path: the path to the main sparktk/pyspark script within the uploads folder
    :return: None
    """
    # print(...) call form behaves identically on Python 2 and 3 for a
    # single argument.
    print("Entering spark_submit")
    mark_submitted(driver_path)
    pool = Pool(max_workers=1)
    cmd_string = "%s >>%s 2>&1" % (exec_string, log_file)
    # Fix: the original log line misspelled "string" as "stting".
    print("CMD string is %s" % (cmd_string))
    # SECURITY NOTE(review): cmd_string is run through a shell (shell=True)
    # and includes uploaded file paths, so shell metacharacters in a
    # filename can inject commands.  Consider an argv list with explicit
    # redirection handling instead.
    future = pool.submit(subprocess.call, cmd_string, shell=True)
    future.driver_path = driver_path
    future.add_done_callback(mark_completed)
def mark_completed(future):
    """
    once the application has finished running, updates the status_file with a new entry for COMPLETED
    :param future: the finished future; carries the driver script path in its
        ``driver_path`` attribute (attached by spark_submit)
    :return: None
    """
    update_status(future.driver_path, app_status=APP_STATUS['COMPLETED'])
# TODO: this is only a workaround to ublock the spark-submit against sparktk apps while the bug is being fixed.
def get_sparktk_submit_jars():
    """
    spark-submit requires sparktk apps to provide values for both --jars and --driver-class-path options.
    This function finds these jars.
    :return: a comma-joined string of jar paths (for --jars) and a string to
             be used for the --driver-class-path command line option value
    """
    extns = ('.jar',)  # fix: ('.jar') was a plain string, not a tuple
    sparktk_submit_jars = []
    # One loop over both trees instead of two copy-pasted walk loops.
    for env_var in ('SPARK_HOME', 'SPARKTK_HOME'):
        for root, dirnames, fns in os.walk(os.environ[env_var]):
            sparktk_submit_jars.extend(
                os.path.join(root, fn)
                for fn in fns if fn.lower().endswith(extns))
    sparktk_driver_class_path = (
        os.environ['SPARK_HOME'] + "/lib/*:" +
        os.environ['SPARKTK_HOME'] + "/*:" +
        os.environ['SPARKTK_HOME'] + "/dependencies/*")
    return ','.join(sparktk_submit_jars), sparktk_driver_class_path
class SparkSubmitHandler(IPythonHandler):
    """
    implements the "spark-submit" REST api end point
    Examples:
    curl http://<JUPYTER_NOTEBOOK_URL>/spark-submit -d "driver-path=uploads/0001/frame-basics.py"
    """
    def post(self):
        driver_path = self.get_argument('driver-path')
        if os.path.isfile(driver_path):
            # Consistency fix: use the LOG_FILE constant rather than a
            # second hard-coded 'LOG.log' literal.
            logfile = os.path.dirname(driver_path) + '/' + LOG_FILE
            sparktk_submit_jars, sparktk_driver_class_path = get_sparktk_submit_jars()
            exec_string = 'spark-submit --jars %s --driver-class-path %s %s' % (
                sparktk_submit_jars, sparktk_driver_class_path, driver_path)
            spark_submit(exec_string, logfile, driver_path)
            self.write("SparkSubmit Job Queued\n")
        else:
            self.write("The given path %s is not a valid script" % (driver_path))
class LogHandler(IPythonHandler):
    """
    implements the "logs" REST api endpoint.
    Examples:
    curl http://<JUPYTER_NOTEBOOK_URL>/logs -d "app-path=uploads/0001" -d "offset=1" -d "n=100"
    """
    def post(self):
        app_path = self.get_argument('app-path')
        offset_str = self.get_argument('offset', '0', True)
        num_lines_str = self.get_argument('n', '10', True)
        try:
            offset = int(offset_str)
            num_lines = int(num_lines_str)
        except ValueError:
            self.write("both offset and n must be integers.")
            return
        logfile = app_path + '/' + LOG_FILE
        if not (os.path.exists(app_path) and os.path.isfile(logfile)):
            self.write("Error, app-path %s doesn't exist or no logs exist yet" % (app_path))
            return
        # Stream the requested window of lines; num_lines < 0 means "to EOF".
        with open(logfile, 'rb') as f:
            for idx, line in enumerate(f):
                if idx < offset:
                    continue
                if num_lines < 0 or idx < offset + num_lines:
                    self.write(line)
class StatusHandler(IPythonHandler):
    """
    implements the "status" REST api endpoint.
    Examples:
    curl http://<JUPYTER_NOTEBOOK_URL>/status -d "app-path=uploads/0001"
    """
    def post(self):
        app_path = self.get_argument('app-path')
        status_file = app_path + '/' + STATUS_FILE
        if (os.path.exists(app_path) and os.path.isfile(status_file)):
            # Fix: the original left `line` unbound (NameError) when the
            # status file was empty; pre-initialise and report the error
            # message instead.
            line = None
            with open(status_file, 'rb') as f:
                for line in f:
                    pass
            if line is not None:
                self.write(line)
            else:
                self.write("Error, app-path %s doesn't exist or no status exist yet" % (app_path))
        else:
            self.write("Error, app-path %s doesn't exist or no status exist yet" % (app_path))
class RenameHandler(IPythonHandler):
    """
    implements the "rename" REST api endpoint.
    Examples:
    curl http://<JUPYTER_NOTEBOOK_URL>/rename -d "app-path=uploads/0001" -d "dst-path=uploads/myapp"
    """
    def post(self):
        app_path = self.get_argument('app-path')
        dst_path = self.get_argument('dst-path')
        status_file_path = app_path + '/' + STATUS_FILE
        if not (os.path.exists(app_path) and os.path.isfile(status_file_path)):
            self.write("Error, app-path %s doesn't exist or not a valid path" % (app_path))
            return
        # Only the last status record matters (one JSON object per line).
        with open(status_file_path, 'rb') as f:
            last_status = json.loads(f.readlines()[-1])
        if last_status['app-status'] == APP_STATUS['SUBMITTED']:
            # A SUBMITTED app is still running; refuse to move its directory.
            self.write("Error, directory %s is in use, please try later" % (app_path))
        else:
            os.rename(app_path, dst_path)
            self.write('the new path for the app is %s' % (dst_path))
class DeleteHandler(IPythonHandler):
    """
    implements the "delete" REST api endpoint.
    Examples:
    curl http://<JUPYTER_NOTEBOOK_URL>/delete -d "app-path=uploads/0001"
    """
    def post(self):
        app_path = self.get_argument('app-path')
        status_file_path = app_path + '/' + STATUS_FILE
        if not (os.path.exists(app_path) and os.path.isfile(status_file_path)):
            self.write("Pass, app-path %s doesn't exist or not a valid path. No action is needed." % (app_path))
            return
        with open(status_file_path, 'rb') as f:
            last_status = json.loads(f.readlines()[-1])
        if last_status['app-status'] == APP_STATUS['SUBMITTED']:
            # A running (SUBMITTED) app owns the directory; do not delete it.
            self.write("Error, directory %s is in use, please try later" % (app_path))
            return
        # TODO: Once jupyter image size is not an issue, replace this manual
        # bottom-up walk with shutil.rmtree.
        for root, dirs, files in os.walk(top=app_path, topdown=False):
            for name in files:
                os.remove(os.path.join(root, name))
            for name in dirs:
                os.rmdir(os.path.join(root, name))
        os.rmdir(app_path)
        self.write("The app directory %s was successfully deleted" % (app_path))
def load_jupyter_server_extension(nb_app):
    '''
    Based on https://github.com/Carreau/jupyter-book/blob/master/extensions/server_ext.py
    '''
    web_app = nb_app.web_app
    host_pattern = '.*$'
    # Make this extension's template directory visible to the notebook's
    # jinja2 environment.
    template_dir = os.path.join(os.path.dirname(__file__), "templates")
    web_app.settings["jinja2_env"].loader.searchpath += [template_dir]
    handlers = [
        (r"/hello", IndexHandler),
        (r"/upload", UploadHandler),
        (r"/spark-submit", SparkSubmitHandler),
        (r"/rename", RenameHandler),
        (r"/delete", DeleteHandler),
        (r"/logs", LogHandler),
        (r"/status", StatusHandler),
    ]
    web_app.add_handlers(host_pattern, host_handlers=handlers)
| 36.548485 | 119 | 0.610646 | 1,549 | 12,061 | 4.590704 | 0.212395 | 0.033469 | 0.0135 | 0.02025 | 0.373365 | 0.33188 | 0.316271 | 0.279426 | 0.25327 | 0.215722 | 0 | 0.00514 | 0.274107 | 12,061 | 329 | 120 | 36.659574 | 0.807082 | 0.026117 | 0 | 0.207071 | 0 | 0.005051 | 0.161004 | 0.00322 | 0 | 0 | 0 | 0.006079 | 0 | 0 | null | null | 0.020202 | 0.035354 | null | null | 0.010101 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc7b00bd1fc5d694719efad0abad87b8890d1551 | 492 | py | Python | birgitta/recipe/debug/dataframe.py | telia-oss/birgitta | e4d27465f54a1d4789741e19e15aec726149a735 | [
"MIT"
] | 8 | 2019-11-25T16:39:33.000Z | 2022-03-31T12:48:54.000Z | birgitta/recipe/debug/dataframe.py | telia-oss/birgitta | e4d27465f54a1d4789741e19e15aec726149a735 | [
"MIT"
] | 218 | 2019-09-09T11:11:59.000Z | 2022-03-08T05:16:40.000Z | birgitta/recipe/debug/dataframe.py | telia-oss/birgitta | e4d27465f54a1d4789741e19e15aec726149a735 | [
"MIT"
] | 4 | 2020-07-21T15:33:40.000Z | 2021-12-22T11:32:45.000Z | """Debug functions for notebooks.
"""
def count(df, name=None):
    """Print the row count of ``df``, optionally preceded by its name."""
    if name:
        print("Dataset: %s" % (name))
    n_rows = df.count()
    print("Count: %d" % (n_rows))
def show(df, name=None, num_rows=1):
    """Print an optional "Dataset: <name>" header, then show ``num_rows`` rows."""
    header = "Dataset: %s" % (name)
    if name:
        print(header)
    df.show(num_rows)
def profile(df, name):
    """Profile a dataframe, initially just count and show."""
    count(df, name=name)
    show(df, name=name)
| 21.391304 | 61 | 0.591463 | 70 | 492 | 4.128571 | 0.4 | 0.103806 | 0.076125 | 0.124567 | 0.15917 | 0.15917 | 0 | 0 | 0 | 0 | 0 | 0.002653 | 0.23374 | 492 | 22 | 62 | 22.363636 | 0.763926 | 0.317073 | 0 | 0.363636 | 0 | 0 | 0.098726 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.272727 | false | 0 | 0 | 0 | 0.272727 | 0.272727 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc7be425fb54cf143f7728410a170c501953c40e | 7,346 | py | Python | src/tlstofu.py | faelif/tls-tofu | 367109ce7a8e6f41e77c41cf010e8c9a2c87a3ed | [
"MIT"
] | 1 | 2022-02-19T22:28:44.000Z | 2022-02-19T22:28:44.000Z | src/tlstofu.py | faelif/tls-tofu | 367109ce7a8e6f41e77c41cf010e8c9a2c87a3ed | [
"MIT"
] | null | null | null | src/tlstofu.py | faelif/tls-tofu | 367109ce7a8e6f41e77c41cf010e8c9a2c87a3ed | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
#
#
import pathlib # to find config directory
import os # to make config directory
import datetime # for datetime.now()
import ssl # for ssl.CertificateError
import ipaddress # to test if an address is an ipaddress
import re # to match hostnames to eachother
import sqlite3
from cryptography import x509 # to check dates and match hostname
# Folder for TLS-TOFU config
CONF_DIR = pathlib.Path("~/.config/tls-tofu").expanduser()
# TOFU known_hosts database. This is stored per-user.
USER_TLS_TOFU_DB = CONF_DIR / "known_hosts.db"

# Ensure the per-user config directory exists before any database access.
os.makedirs(CONF_DIR, exist_ok=True)
class CertManager(object):
    """SQLite-backed store of (host, certificate) pairs for TOFU pinning."""

    def __init__(self, db_path: str = USER_TLS_TOFU_DB.absolute()):
        self.db_path = db_path
        with sqlite3.connect(self.db_path) as con:
            cur = con.cursor()
            cur.execute(
                "CREATE TABLE IF NOT EXISTS keys (host TEXT, cert BLOB)"
            )

    def add_cert(self, cert: bytes, address: str):
        """Record `cert` (DER bytes) as a pinned certificate for `address`."""
        with sqlite3.connect(self.db_path) as con:
            cur = con.cursor()
            cur.execute(
                "INSERT INTO keys VALUES (?, ?)", (address, cert)
            )

    def get_certs(self):
        """Return every (host, cert) row in insertion order."""
        with sqlite3.connect(self.db_path) as con:
            cur = con.cursor()
            cur.execute(
                "SELECT * FROM keys"
            )
            return cur.fetchall()

    def verify_cert(self, cert: bytes, address: str):
        """
        Check `cert` against the first pinned certificate for `address`.

        Returns True on a match; raises CertMismatchError when the first
        pinned cert for the address differs, CertNotFoundError when the
        address has never been recorded.
        """
        with sqlite3.connect(self.db_path) as con:
            cur = con.cursor()
            # Perf fix: query only this host's oldest row instead of scanning
            # the whole table in Python; rowid order preserves the original
            # first-inserted-row-wins semantics.
            cur.execute(
                "SELECT cert FROM keys WHERE host = ? ORDER BY rowid LIMIT 1",
                (address,)
            )
            row = cur.fetchone()
        if row is None:
            raise CertNotFoundError(
                "The specified address was not located in the pre-existing "
                "database"
            )
        if row[0] == cert:
            return True
        raise CertMismatchError(
            "The specified address has a certificate that does "
            "not match that in the pre-existing database"
        )
# Module-level singleton backed by the per-user known_hosts database.
cert_manager = CertManager()
class CertInvalidError(ssl.CertificateError):
    """The certificate itself is invalid (dates or embedded addresses)."""
    pass


class CertUntrustedError(ssl.CertificateError):
    """The certificate cannot be trusted against the TOFU database/hostname."""
    pass


class CertDatetimeError(CertInvalidError):
    """The certificate is not yet valid or is past expiry."""
    pass


class CertAddressError(CertInvalidError):
    """The certificate carries a malformed dNSName (e.g. multiple wildcards)."""
    pass


class CertMatchingError(CertUntrustedError):
    """The supplied hostname matches none of the certificate's names."""
    pass


class CertNotFoundError(CertUntrustedError):
    """The address has no pinned certificate in the database."""
    pass


class CertMismatchError(CertUntrustedError):
    """The address's pinned certificate differs from the one presented."""
    pass
def validate_cert(cert: bytes, hostname: str = None):
    """
    Takes DER-encoded cert as returned from ssock.getpeercert(binary_form=True)
    and validates it, checking if it matches a given address (optional) and
    making sure that it's within the bounds in the `Validity' section of the
    cert.
    Inputs:
    cert: bytes
    The DER-encoded certificate to validate.
    hostname: str = None
    A hostname to match against. If it is falsey, no hostname checking
    is performed. If it is truthy, it is checked for equality against
    the subjectAltNames and commonName of the cert.
    Outputs:
    If the cert is valid for the hostname and current time, outputs `True'.
    Otherwise, raises an exception giving more detail.
    Raises:
    CertDatetimeError (subclass of CertInvalidError)
    This is raised if the supplied certificate is either past expiry
    or is not yet valid
    CertAddressError (subclass of CertInvalidError)
    This is raised if the supplied certificate has an invalid address
    in it's altNames or commonName
    CertMatchingError (subclass of CertUntrustedError)
    This is raised if the address supplied does not appear in the
    certificate's altNames or commonName
    """
    return_value = False
    parsed_cert = x509.load_der_x509_certificate(cert)
    now = datetime.datetime.utcnow()
    if parsed_cert.not_valid_before > now:
        raise CertDatetimeError(
            f"The supplied certificate is not valid until"
            f"{parsed_cert.not_valid_before}"
        )
    elif parsed_cert.not_valid_after < now:
        raise CertDatetimeError(
            f"The supplied certificate is not valid after"
            f"{parsed_cert.not_valid_after}"
        )
    if hostname:
        names = []
        ips = []
        cert_common_name = parsed_cert.subject.get_attributes_for_oid(
            x509.oid.NameOID.COMMON_NAME
        )[0].value
        names.append(cert_common_name)
        try:
            alt_names = parsed_cert.extensions.get_extension_for_oid(
                x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME
            ).value
        except x509.ExtensionNotFound:
            pass
        else:
            alt_hostnames = alt_names.get_values_for_type(
                x509.DNSName
            )
            alt_ips = alt_names.get_values_for_type(
                x509.IPAddress
            )
            names.extend(alt_hostnames)
            ips.extend(alt_ips)
        try:  # If the hostname is an IP address...
            host_ip = ipaddress.ip_address(hostname)
        except ValueError:
            pass
        else:
            for i in ips:  # ... does it match one in the subjectAltNames?
                if ipaddress.ip_address(i) == host_ip:
                    return_value = True
        for i in names:
            # Is the cert's name or hostname an IDN?
            if i[:4] == "xn--" or hostname[:4] == "xn--":
                # If so, don't expand wildcards and match case-insensitively
                if hostname.lower() == i.lower():
                    return_value = True
            else:
                # get sections of a dNSName, as a left-most component and a
                # domain
                dns_name_components = i.split(".", 1)
                left_component = dns_name_components[0]
                if "*" in left_component:  # Is it a wildcard cert?
                    host_name_components = hostname.split(".", 1)
                    # Do the domain parts match?
                    if host_name_components[1] == dns_name_components[1]:
                        # If so, check the left-most component with wildcards
                        if left_component.count("*") > 1:
                            # If the dNSName has more than one asterisk, it's
                            # invalid
                            raise CertAddressError(
                                "The supplied certificate contains an invalid "
                                "dNSName containing two or more asterisks."
                            )
                        else:
                            # Fix: the original concatenated the wildcard
                            # pattern to the domain without the separating
                            # dot, left the domain's dots unescaped, and
                            # used unanchored re.match — so "*.example.com"
                            # never matched "foo.example.com".  Match only
                            # the host's left-most label, fully anchored,
                            # with the literal parts escaped.
                            label_regex = "[A-Za-z0-9-]*".join(
                                re.escape(part)
                                for part in left_component.split("*")
                            )
                            if re.fullmatch(label_regex,
                                            host_name_components[0]):
                                return_value = True
                elif i == hostname:
                    return_value = True
    # If we got this far, no errors have been raised
    if return_value:
        return True
    else:
        # The cert isn't valid for the hostname
        raise CertMatchingError(
            "The supplied certificate is invalid for the supplied address"
        )
    return True
| 33.239819 | 79 | 0.571059 | 826 | 7,346 | 4.958838 | 0.297821 | 0.018799 | 0.032227 | 0.023438 | 0.174805 | 0.125732 | 0.112549 | 0.098877 | 0.098877 | 0.098877 | 0 | 0.009174 | 0.361966 | 7,346 | 220 | 80 | 33.390909 | 0.864732 | 0.270487 | 0 | 0.257353 | 0 | 0 | 0.116724 | 0.01129 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036765 | false | 0.066176 | 0.058824 | 0 | 0.183824 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
cc87734a6c5125adbaa56493538b18b66eeca8e8 | 478 | py | Python | aula2/decorador.py | es99/curso-flask | 03446ccc89894a2c4f2d1c14d0040e80d2226d9e | [
"Unlicense"
] | 1 | 2020-07-21T05:17:20.000Z | 2020-07-21T05:17:20.000Z | aula2/decorador.py | es99/curso-flask | 03446ccc89894a2c4f2d1c14d0040e80d2226d9e | [
"Unlicense"
] | null | null | null | aula2/decorador.py | es99/curso-flask | 03446ccc89894a2c4f2d1c14d0040e80d2226d9e | [
"Unlicense"
] | null | null | null | from datetime import datetime
def grava_arquivo():
    """Append a timestamped "email enviado" entry to logs.txt."""
    # Fix: the original format "%d/%/%Y - %H:%M" contained the invalid
    # directive "%/" instead of "%m" for the month.
    data = datetime.now().strftime("%d/%m/%Y - %H:%M")
    # with-block fixes the leaked file handle of the original.
    with open('logs.txt', 'a') as file:
        file.write("email enviado " + data)
def verifica(function):
    """Decorator: log to logs.txt before running the wrapped function."""
    def wrapper(*args, **kwargs):
        # Fix: the original wrapper took no parameters and discarded the
        # wrapped function's return value, so any decorated function with
        # arguments (e.g. envia_email) raised TypeError when called.
        grava_arquivo()
        return function(*args, **kwargs)
    return wrapper
def imprime_ola():
    """Print a simple greeting to stdout."""
    mensagem = "Olá mundo"
    print(mensagem)
@verifica
def envia_email(endereco):
    """Print a confirmation that an email was sent to ``endereco``."""
    mensagem = f"email enviado para {endereco}"
    print(mensagem)
if __name__ == "__main__":
    # Fix: the original `verifica(imprime_ola())` called imprime_ola first
    # and handed its None result to the decorator.  Decorate the function
    # object, then invoke the wrapped version.
    verifica(imprime_ola)()
| 19.916667 | 53 | 0.633891 | 60 | 478 | 4.833333 | 0.65 | 0.082759 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.215481 | 478 | 23 | 54 | 20.782609 | 0.773333 | 0 | 0 | 0 | 0 | 0 | 0.176101 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.294118 | false | 0 | 0.058824 | 0 | 0.411765 | 0.117647 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc88947d5669842fd2d6485d9a7fdaf40075e22d | 396 | py | Python | rent/migrations/0007_rentorder_money.py | wy672515431/- | 582a1b452bce21584117190539ff5fe84a1cc885 | [
"MIT"
] | null | null | null | rent/migrations/0007_rentorder_money.py | wy672515431/- | 582a1b452bce21584117190539ff5fe84a1cc885 | [
"MIT"
] | 1 | 2020-06-13T11:47:08.000Z | 2020-06-13T11:47:08.000Z | rent/migrations/0007_rentorder_money.py | wy672515431/- | 582a1b452bce21584117190539ff5fe84a1cc885 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.3 on 2020-06-12 04:39
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the integer `money` field (amount, default 0) to RentOrder."""

    dependencies = [
        ('rent', '0006_rentorder_pay'),
    ]

    operations = [
        migrations.AddField(
            model_name='rentorder',
            name='money',
            field=models.IntegerField(default=0, verbose_name='金额'),
        ),
    ]
| 20.842105 | 68 | 0.59596 | 43 | 396 | 5.395349 | 0.767442 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.070423 | 0.282828 | 396 | 18 | 69 | 22 | 0.746479 | 0.113636 | 0 | 0 | 1 | 0 | 0.108883 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc8976c20ce6331a199868056aec0f3b8e84d442 | 5,069 | py | Python | releaseherald/releaseherald/plugins/hookspecs.py | Morgan-Stanley/Testplan | 9374d6e0da6ae9aa7a1b5e08b42cd21993485837 | [
"Apache-2.0"
] | null | null | null | releaseherald/releaseherald/plugins/hookspecs.py | Morgan-Stanley/Testplan | 9374d6e0da6ae9aa7a1b5e08b42cd21993485837 | [
"Apache-2.0"
] | null | null | null | releaseherald/releaseherald/plugins/hookspecs.py | Morgan-Stanley/Testplan | 9374d6e0da6ae9aa7a1b5e08b42cd21993485837 | [
"Apache-2.0"
] | null | null | null | from typing import List, Dict, Any
import pluggy
from git import Repo, Tag
from releaseherald.configuration import Configuration
from releaseherald.plugins import CommitInfo
from releaseherald.plugins.interface import (
MutableProxy,
VersionNews,
News,
Output,
CommandOptions,
)
# Marker whose decorated functions form releaseherald's plugin hook contract.
hookspec = pluggy.HookspecMarker("releaseherald")


@hookspec
def process_config(config: Configuration):
    """
    Called as the first callback to the plugin. It can use this to
    initiate itself based on the configuration read from the config file.
    It also has a chance to change the configuration. It can for example
    parse and validate its own sub configuration and replace the dict in
    `config` with a more manageable object.

    Args:
        config: The configuration
    """
    pass


@hookspec
def get_command_options(command: str) -> CommandOptions:
    """
    This callback give chance to a plugin to add commandline options to
    various commands. It is called with the name of the command.

    Args:
        command: the name of the cli command the options are collected for

    Returns:
        collection of `click.Options` that are added to the cli command
        and a callable that can set the configured value as the default for
        the cli option
    """
    pass


@hookspec
def on_start_command(command: str, kwargs: Dict[str, Any]):
    """
    Called before a cli command start to execute.

    Args:
        command: the name of the command
        kwargs: the parameters the command called with
    """
    pass


@hookspec
def process_tags(repo: Repo, tags: List[Tag]):
    """
    Args:
        repo: the git repository
        tags:
            List of tags releaseherald consider as the versions
            it needs to collect newsfragments. The plugin is free
            to manipulate the list of tags; complex filtering can be
            implemented here.
    """
    pass


@hookspec
def process_commits(repo: Repo, tags: List[Tag], commits: List[CommitInfo]):
    """
    The aim of this hook is to collect the list of commits based on the tags.
    The plugin supposed to modify the `commits` list. The default plugin, just
    turns the tags into [CommitInfo][releaseherald.plugins.interface.CommitInfo].

    Args:
        repo: the git repository
        tags: the tags collected by [process_tags][releaseherald.plugins.hookspecs.process_tags]
        commits: Info about each commits
    """
    pass


@hookspec
def get_news_between_commits(
    repo: Repo,
    commit_from: CommitInfo,
    commit_to: CommitInfo,
    news: List[News],
):
    """
    In this hook the plugin can alter the collected `news` between the two commits. It is
    called for every consecutive commit pairs processed by
    [process_commits][releaseherald.plugins.hookspecs.process_commits].

    Args:
        repo: the git repository
        commit_from: The earlier commit
        commit_to: The later commit
        news: The list of news that previous plugins collected, can be altered by the plugin
    """
    pass


@hookspec
def get_version_news(
    repo: Repo,
    commit_from: CommitInfo,
    commit_to: CommitInfo,
    news: List[News],
    version_news: MutableProxy[VersionNews],
):
    """
    In this hook plugins can produce a wrapper around the the list of news that represent a
    version in releaseherald datastructure. Called for every consecutive commit pairs processed by
    [process_commits][releaseherald.plugins.hookspecs.process_commits] with the news processed by
    [get_news_between_commits][releaseherald.plugins.hookspecs.get_news_between_commits] for the
    same two commits.

    Args:
        repo: the git repository
        commit_from: The earlier commit
        commit_to: The later commit
        news: The list of news collected by
            [get_news_between_commits][releaseherald.plugins.hookspecs.get_news_between_commits]
        version_news: The version news representing a version with the changes between two commits
    """
    pass


@hookspec
def process_version_news(version_news: List[VersionNews]):
    """
    This hook give a chance for the plugin to alter the list of versions.

    Args:
        version_news: All the version/news collected so far
    """
    pass


@hookspec
def generate_output(
    version_news: List[VersionNews], output: MutableProxy[Output]
):
    """
    The plugin can generate an output in memory in any kind of format it want. It also has a
    chance to alter or replace an output generated by any previous plugins.

    Args:
        version_news:
            All the version/news collected and processed by
            [process_version_news][releaseherald.plugins.hookspecs.process_version_news]
        output: Output in plugin specific format
    """
    pass


@hookspec
def write_output(output: Output):
    """
    The plugin should do its final output step here. Write to file, to stdout or send a mail,
    upload to some service, whatever desired.

    Args:
        output: the output from [generate_output][releaseherald.plugins.hookspecs.generate_output]
    """
    pass
| 28.801136 | 98 | 0.70073 | 668 | 5,069 | 5.236527 | 0.254491 | 0.040881 | 0.038593 | 0.030017 | 0.263579 | 0.226701 | 0.210692 | 0.210692 | 0.18725 | 0.18725 | 0 | 0 | 0.239298 | 5,069 | 175 | 99 | 28.965714 | 0.907158 | 0.655356 | 0 | 0.553571 | 0 | 0 | 0.009496 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.178571 | false | 0.178571 | 0.107143 | 0 | 0.285714 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
cc8d237a2e412d7dfbfd1ba4783f4e96bf2367b1 | 14,998 | py | Python | src/parma/core.py | dougmvieira/parma | d4a57622466ce831024432c151dd69f9a77a71ee | [
"MIT"
] | 3 | 2020-03-04T11:04:18.000Z | 2021-09-27T13:41:37.000Z | src/parma/core.py | dougmvieira/parma | d4a57622466ce831024432c151dd69f9a77a71ee | [
"MIT"
] | null | null | null | src/parma/core.py | dougmvieira/parma | d4a57622466ce831024432c151dd69f9a77a71ee | [
"MIT"
] | 1 | 2020-07-14T20:42:14.000Z | 2020-07-14T20:42:14.000Z | import numpy as np
from scipy.special import xlogy
from .utils import polynomial_powers
class PolyharmonicLagrangeInterpolator:
    """ Callable polyharmonic Lagrange spline interpolant.

    Wraps the precomputed kernel and polynomial coefficients of a fitted
    polyharmonic spline and evaluates the interpolant at new locations.
    """
    def __init__(self, degree, locs, kernel_coeff, poly_coeff):
        self.degree = degree
        self.locs = locs
        self.kernel_coeff = kernel_coeff
        self.poly_coeff = poly_coeff
        self.interpolator = polyharmonic_lagrange_interpolator_factory(
            degree, locs, kernel_coeff, poly_coeff)

    def __call__(self, x):
        """ Evaluate the interpolant.

        Parameters
        ----------
        x : (n+1)-D array-like
            Location of the interpolated values.

        Returns
        -------
        1-D numpy array
            Interpolated values at the given locations.
        """
        return self.interpolator(x)
def polyharmonic_lagrange_interpolator_factory(degree, locs, kernel_coeff,
                                               poly_coeff):
    """Build a closure that evaluates the fitted polyharmonic spline."""
    powers_list = polynomial_powers(degree, len(locs))

    def interpolator(x):
        x = np.array(x)
        n_dims = len(locs)
        # Offsets from every evaluation point to every data location.
        offsets = tuple(x[d, ..., None] - locs[d, None, :]
                        for d in range(n_dims))
        radial_part = kernel(offsets, degree).dot(kernel_coeff)
        poly_part = monomials(x, powers_list).dot(poly_coeff)
        return radial_part + poly_part

    return interpolator
def multiquadric_kernel(x, bandwidth):
    """Multiquadric RBF sqrt(1 + (b*r)^2), with r^2 summed over axis 0."""
    r2 = np.sum(x ** 2, axis=0)
    return np.sqrt(1.0 + (bandwidth ** 2) * r2)
def multiquadric_kernel_diff(x, axis, bandwidth):
    """First partial derivative of the multiquadric kernel along `axis`."""
    denom = multiquadric_kernel(x, bandwidth)
    return (bandwidth ** 2) * x[axis] / denom
def multiquadric_kernel_2nd_diff(x, axis, bandwidth):
    """Second partial derivative of the multiquadric kernel along `axis`.

    d^2/dx_a^2 sqrt(1 + b^2 r^2)
        = b^2 (1 + b^2 r^2 - b^2 x_a^2) / (1 + b^2 r^2)^(3/2)
    """
    r2 = np.sum(x**2, axis=0)
    bw2 = bandwidth**2
    # Fix: the original used the linear term `bw2*x[axis]` where the second
    # derivative requires the squared coordinate `bw2*x[axis]**2`.
    return bw2*(1 + bw2*r2 - bw2*x[axis]**2)/(1 + bw2*r2)**(3/2)
def multiquadric_kernel_cross_diff(x, axis1, axis2, bandwidth):
    """Mixed second partial derivative of the multiquadric kernel.

    d^2/(dx_a dx_b) sqrt(1 + b^2 r^2) = -b^4 x_a x_b / (1 + b^2 r^2)^(3/2)
    """
    r2 = np.sum(x**2, axis=0)
    # Fix: the original dropped the minus sign of the mixed derivative.
    return -bandwidth**4*x[axis1]*x[axis2]/(1 + bandwidth**2*r2)**(3/2)
def kernel(x, degree):
    """Polyharmonic radial kernel: r^n for odd n, r^n*log(r) for even n."""
    r = np.linalg.norm(x, axis=0)
    if degree % 2 == 0:
        # xlogy keeps the r -> 0 limit finite (xlogy(0, 0) == 0).
        return xlogy(r, r) * r ** (degree - 1)
    return r ** degree
def kernel_diff(x, axis, degree):
    """First partial derivative of the polyharmonic kernel along `axis`."""
    n = degree
    r = np.linalg.norm(x, axis=0)
    if degree % 2 == 0:
        radial = (r + n * xlogy(r, r)) * r ** (n - 3)
    else:
        radial = n * r ** (n - 2)
    return x[axis] * radial
def kernel_2nd_diff(x, axis, degree):
    """Second partial derivative of the polyharmonic kernel along `axis`."""
    n = degree
    r = np.linalg.norm(x, axis=0)
    if degree % 2 == 0:
        k_diff = (r + n * xlogy(r, r)) * r ** (n - 5)
        k_2nd_diff = (r * (2 * n - 1) + n * (n - 1) * xlogy(r, r)) * r ** (n - 5)
    else:
        k_diff = n * r ** (n - 4)
        k_2nd_diff = n * (n - 1) * r ** (n - 4)
    # Combine the radial first and second derivative contributions.
    return k_diff * (r ** 2 - x[axis] ** 2) + k_2nd_diff * x[axis] ** 2
def kernel_cross_diff(x, axis1, axis2, degree):
    """Mixed second partial derivative of the polyharmonic kernel."""
    n = degree
    r = np.linalg.norm(x, axis=0)
    if degree % 2 == 0:
        k_diff = (r + n * xlogy(r, r)) * r ** (n - 5)
        k_2nd_diff = (r * (2 * n - 1) + n * (n - 1) * xlogy(r, r)) * r ** (n - 5)
    else:
        k_diff = n * r ** (n - 4)
        k_2nd_diff = n * (n - 1) * r ** (n - 4)
    return (k_2nd_diff - k_diff) * x[axis1] * x[axis2]
def monomials(xs, powers_list):
    """Evaluate each monomial x^p (one per entry of powers_list) at the points."""
    pts = np.stack(xs, axis=-1)
    out = np.empty((*pts.shape[:-1], len(powers_list)))
    for col, powers in enumerate(powers_list):
        out[..., col] = np.prod(np.power(pts, powers), axis=-1)
    return out
def polyharmonic_interpolator(locs, vals, degree):
    """ Polyharmonic Lagrange interpolation.

    Parameters
    ----------
    locs : (n+1)-D array-like
        Location of the samples of dimension 'n'. The first dimension must be
        the number of samples.
    vals : 1-D array-like
        Values of the samples at their corresponding locations.
    degree : int
        Degree of the polynomial kernel.

    Returns
    -------
    function
        Interpolator of function values. See its docstring for more details.
    """
    locs = np.array(locs)
    powers_list = polynomial_powers(degree, len(locs))
    pairwise_offsets = locs[:, :, None] - locs[:, None, :]
    kernel_matrix = kernel(pairwise_offsets, degree)
    poly_matrix = monomials(locs, powers_list)
    n_poly = poly_matrix.shape[1]
    # Saddle-point system: kernel block plus polynomial side constraints.
    lhs = np.block([[kernel_matrix, poly_matrix],
                    [poly_matrix.T, np.zeros((n_poly, n_poly))]])
    rhs = np.zeros(len(lhs))
    rhs[:len(vals)] = vals
    coeffs = np.linalg.solve(lhs, rhs)
    return PolyharmonicLagrangeInterpolator(
        degree, locs, coeffs[:len(vals)], coeffs[len(vals):])
def monomials_diff(xs, axis, powers_list):
    """Partial derivative along `axis` of every monomial in `powers_list`,
    evaluated at the points `xs` (sequence of per-axis coordinate arrays).
    Columns whose power along `axis` is zero stay zero.
    """
    pts = np.stack(xs, axis=-1)
    other = np.arange(pts.shape[-1]) != axis
    vals = np.zeros(pts.shape[:-1] + (len(powers_list),))
    for col, powers in enumerate(powers_list):
        p = powers[axis]
        if p > 0:
            deriv = p*pts[..., axis]**(p - 1)
            rest = np.prod(np.power(pts[..., other], powers[other]), axis=-1)
            vals[..., col] = deriv*rest
    return vals
def polyharmonic_hermite_interpolator(locs, vals, hermite_axes, hermite_vals,
                                      degree):
    """ Polyharmonic Hermite interpolation.
    Parameters
    ----------
    locs : (n+1)-D array-like
        Location of the samples of dimension 'n'. The first dimension must be
        the number of samples.
    vals : 1-D array-like
        Values of the samples at their corresponding locations.
    hermite_axes : 1-D array-like of ints
        Dimensions at which the derivatives information is available. The list
        is a zero-based numbering.
    hermite_vals : (n+1)-D array-like
        Derivatives of the samples at their corresponding locations and
        specified dimensions. The first dimension must match the length of
        the argument `hermite_axes`.
    degree : int
        Degree of the polynomial kernel.
    Returns
    -------
    tuple of functions
        Pair of functions: (i) interpolation of function values and (ii)
        interpolation of function derivatives. See their docstrings for more
        details.
    """
    # NOTE(review): values in hermite_axes are used both as spatial axis ids
    # and as positional indices into the per-axis containers built below
    # (kernel_diff_vals, kernel_2nd_diff_vals rows, c_diff_kernel).  This is
    # only consistent when hermite_axes == [0, 1, ..., n_haxes-1]; confirm
    # with callers before passing a sparse axis selection.
    locs = np.array(locs)
    n_dims, n_data, n_haxes = len(locs), len(vals), len(hermite_axes)
    powers_list = np.array(polynomial_powers(degree, n_dims))
    n_poly = len(powers_list)
    # Pairwise location differences, shape (n_dims, n_data, n_data).
    kernel_locs = locs[:, :, None] - locs[:, None, :]
    kernel_vals = kernel(kernel_locs, degree)
    kernel_diff_vals = [kernel_diff(kernel_locs, axis, degree)
                        for axis in hermite_axes]
    # Second derivatives: diagonal terms plus symmetric cross terms.
    kernel_2nd_diff_vals = np.zeros((n_haxes, n_haxes, n_data, n_data))
    for i in hermite_axes:
        kernel_2nd_diff_vals[i, i, ...] = kernel_2nd_diff(kernel_locs, i,
                                                          degree)
        for j in range(i):
            cross_diffs = kernel_cross_diff(kernel_locs, i, j, degree)
            kernel_2nd_diff_vals[i, j, ...] = cross_diffs
            kernel_2nd_diff_vals[j, i, ...] = cross_diffs
    monomial_vals = monomials(locs, powers_list)
    monomials_diff_vals = [monomials_diff(locs, axis, powers_list)
                           for axis in hermite_axes]
    # Assemble the block system [values rows; derivative rows; polynomial
    # orthogonality rows] and solve for all spline coefficients at once.
    A_zeros = np.zeros((n_poly, n_poly))
    A_kernel = [kernel_vals, *kernel_diff_vals, monomial_vals]
    A_kernel_diff = [[kernel_diff_vals[ax_i], *kernel_2nd_diff_vals[ax_i],
                      monomials_diff_vals[ax_i]] for ax_i in hermite_axes]
    A_poly = [monomial_vals.T, *map(np.transpose, monomials_diff_vals), A_zeros]
    A = np.block([A_kernel, *A_kernel_diff, A_poly])
    b = np.concatenate([vals, *hermite_vals, np.zeros(n_poly)])
    c = np.linalg.solve(A, b)
    # Split the solution into kernel, per-axis derivative and polynomial parts.
    c_kernel = c[:n_data]
    c_diff_kernel = np.split(c[n_data:-n_poly], n_haxes)
    c_poly = c[-n_poly:]
    def interpolator(x):
        """ Polyharmonic Hermite spline interpolation of the function values.
        Parameters
        ----------
        x : (n+1)-D array-like
            Location of the interpolated values.
        Returns
        -------
        1-D numpy array
            Interpolated values at the given locations.
        """
        x = np.array(x)
        # Differences between query points and sample locations, per axis.
        kernel_xs = tuple(x[i, ..., None] - locs[i, None, :]
                          for i in range(n_dims))
        kernel_vals = kernel(kernel_xs, degree).dot(c_kernel)
        kernel_diff_vals = [
            kernel_diff(kernel_xs, axis, degree).dot(c_diff_kernel[axis])
            for axis in hermite_axes]
        poly_vals = monomials(x, powers_list).dot(c_poly)
        return kernel_vals + sum(kernel_diff_vals) + poly_vals
    def interpolator_diff(x, axis):
        """ Polyharmonic Hermite interpolation of the function derivatives.
        Parameters
        ----------
        x : (n+1)-D array-like
            Locations of the interpolated derivatives.
        axis : int
            Dimension on which the derivative is interpolated.
        Returns
        -------
        1-D numpy array
            Interpolated derivatives at the given locations.
        """
        x = np.array(x)
        kernel_xs = tuple(x[i, ..., None] - locs[i, None, :]
                          for i in range(n_dims))
        kernel_diff_vals = kernel_diff(kernel_xs, axis, degree).dot(c_kernel)
        kernel_2nd_diff_vals = kernel_2nd_diff(kernel_xs, axis,
                                               degree).dot(c_diff_kernel[axis])
        # Cross terms couple this axis with every other Hermite axis.
        kernel_cross_diff_vals = [
            kernel_cross_diff(kernel_xs, axis, axis_j,
                              degree).dot(c_diff_kernel[axis_j])
            for axis_j in hermite_axes if axis != axis_j]
        poly_vals = monomials_diff(x, axis, powers_list).dot(c_poly)
        return (kernel_diff_vals + kernel_2nd_diff_vals
                + sum(kernel_cross_diff_vals) + poly_vals)
    return interpolator, interpolator_diff
def multiquadric_hermite_linear_system(locs, vals, hermite_axes, hermite_vals,
                                       bandwidth):
    """Build the symmetric linear system (A, b) for multiquadric Hermite
    interpolation: value rows plus one derivative row block per Hermite axis.

    NOTE(review): as in polyharmonic_hermite_interpolator, hermite_axes values
    index the per-axis containers positionally, so this assumes
    hermite_axes == [0, 1, ..., n_haxes-1] -- confirm with callers.
    """
    locs = np.array(locs)
    n_data, n_haxes = len(vals), len(hermite_axes)
    # Pairwise location differences, shape (n_dims, n_data, n_data).
    kernel_locs = locs[:, :, None] - locs[:, None, :]
    kernel_vals = multiquadric_kernel(kernel_locs, bandwidth)
    kernel_diff_vals = [multiquadric_kernel_diff(kernel_locs, axis, bandwidth)
                        for axis in hermite_axes]
    # Second derivatives: diagonal terms plus symmetric cross terms.
    kernel_2nd_diff_vals = np.zeros((n_haxes, n_haxes, n_data, n_data))
    for i in hermite_axes:
        kernel_2nd_diff_vals[i, i, ...] = multiquadric_kernel_2nd_diff(kernel_locs, i,
                                                                       bandwidth)
        for j in range(i):
            cross_diffs = multiquadric_kernel_cross_diff(kernel_locs, i, j, bandwidth)
            kernel_2nd_diff_vals[i, j, ...] = cross_diffs
            kernel_2nd_diff_vals[j, i, ...] = cross_diffs
    A_kernel = [kernel_vals, *kernel_diff_vals]
    A_kernel_diff = [[kernel_diff_vals[ax_i], *kernel_2nd_diff_vals[ax_i]]
                     for ax_i in hermite_axes]
    A = np.block([A_kernel, *A_kernel_diff])
    b = np.concatenate([vals, *hermite_vals])
    return A, b
def multiquadric_hermite_cross_validation_loss(locs, vals, hermite_axes,
                                               hermite_vals, bandwidth):
    """Cross-validation loss for one candidate bandwidth.

    Uses the c_i / (A^-1)_ii ratio of the interpolation coefficients --
    appears to follow Rippa's leave-one-out formula (confirm with reference).
    """
    system, rhs = multiquadric_hermite_linear_system(
        locs, vals, hermite_axes, hermite_vals, bandwidth)
    inverse = np.linalg.inv(system)
    residuals = inverse.dot(rhs)/np.diag(inverse)
    return np.sum(residuals**2)
def multiquadric_hermite_interpolator(locs, vals, hermite_axes, hermite_vals,
                                      bandwidth=None):
    """ Multiquadric Hermite interpolation.
    Parameters
    ----------
    locs : (n+1)-D array-like
        Location of the samples of dimension 'n'. The first dimension must be
        the number of samples.
    vals : 1-D array-like
        Values of the samples at their corresponding locations.
    hermite_axes : 1-D array-like of ints
        Dimensions at which the derivatives information is available. The list
        is a zero-based numbering.
    hermite_vals : (n+1)-D array-like
        Derivatives of the samples at their corresponding locations and
        specified dimensions. The first dimension must match the length of
        the argument `hermite_axes`.
    bandwidth : float, optional
        Bandwidth of the multiquadric kernel. If `None`, the bandwidth is
        computed via cross-validation. Defaults to `None`.
    Returns
    -------
    tuple of functions
        Pair of functions: (i) interpolation of function values and (ii)
        interpolation of function derivatives. See their docstrings for more
        details.
    """
    locs = np.array(locs)
    n_dims, n_data, n_haxes = len(locs), len(vals), len(hermite_axes)
    if bandwidth is None:
        # Grid search on a log-spaced bandwidth range [1e-2, 1e3],
        # keeping the bandwidth with the smallest cross-validation loss.
        bws = np.exp(np.linspace(np.log(1e-2), np.log(1e3), 500))
        losses = [multiquadric_hermite_cross_validation_loss(
            locs, vals, hermite_axes, hermite_vals, bw) for bw in bws]
        bandwidth = bws[np.argmin(losses)]
    A, b = multiquadric_hermite_linear_system(locs, vals, hermite_axes,
                                              hermite_vals, bandwidth)
    c = np.linalg.solve(A, b)
    # Kernel coefficients come first, then one coefficient block per axis.
    c_kernel = c[:n_data]
    c_diff_kernel = np.split(c[n_data:], n_haxes)
    def interpolator(x):
        """ Multiquadric Hermite spline interpolation of the function values.
        Parameters
        ----------
        x : (n+1)-D array-like
            Location of the interpolated values.
        Returns
        -------
        1-D numpy array
            Interpolated values at the given locations.
        """
        x = np.array(x)
        # Differences between query points and sample locations, per axis.
        kernel_xs = tuple(x[i, ..., None] - locs[i, None, :]
                          for i in range(n_dims))
        kernel_vals = multiquadric_kernel(np.array(kernel_xs),
                                          bandwidth).dot(c_kernel)
        kernel_diff_vals = [multiquadric_kernel_diff(
            np.array(kernel_xs), axis, bandwidth).dot(c_diff_kernel[axis])
            for axis in hermite_axes]
        return kernel_vals + sum(kernel_diff_vals)
    def interpolator_diff(x, axis):
        """ Multiquadric Hermite interpolation of the function derivatives.
        Parameters
        ----------
        x : (n+1)-D array-like
            Locations of the interpolated derivatives.
        axis : int
            Dimension on which the derivative is interpolated.
        Returns
        -------
        1-D numpy array
            Interpolated derivatives at the given locations.
        """
        x = np.array(x)
        kernel_xs = tuple(x[i, ..., None] - locs[i, None, :]
                          for i in range(n_dims))
        kernel_diff_vals = multiquadric_kernel_diff(np.array(kernel_xs), axis,
                                                    bandwidth).dot(c_kernel)
        kernel_2nd_diff_vals = multiquadric_kernel_2nd_diff(
            np.array(kernel_xs), axis, bandwidth).dot(c_diff_kernel[axis])
        # Cross terms couple this axis with every other Hermite axis.
        kernel_cross_diff_vals = [
            multiquadric_kernel_cross_diff(np.array(kernel_xs), axis, axis_j,
                                           bandwidth).dot(c_diff_kernel[axis_j])
            for axis_j in hermite_axes if axis != axis_j]
        return (kernel_diff_vals + kernel_2nd_diff_vals
                + sum(kernel_cross_diff_vals))
    return interpolator, interpolator_diff
| 33.254989 | 86 | 0.599413 | 2,007 | 14,998 | 4.27703 | 0.083707 | 0.032619 | 0.030289 | 0.019222 | 0.757805 | 0.719362 | 0.660415 | 0.598788 | 0.559646 | 0.552423 | 0 | 0.012903 | 0.286905 | 14,998 | 450 | 87 | 33.328889 | 0.789715 | 0.235765 | 0 | 0.424528 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.108491 | false | 0 | 0.014151 | 0.014151 | 0.231132 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc8dc0e848d31912c53044fdd2c28a6cdf9c7bb2 | 4,580 | py | Python | pippin/classifiers/unity.py | dessn/Pippin | 0a62eafc40a3ab290d4dbed809c51f69862f99db | [
"MIT"
] | 5 | 2019-06-05T09:17:40.000Z | 2020-12-16T19:46:39.000Z | pippin/classifiers/unity.py | Samreay/Pippin | ed5efaf93b389e7b0504e122c91687a6fd4efb2e | [
"MIT"
] | 53 | 2020-04-08T14:03:44.000Z | 2021-12-17T12:17:15.000Z | pippin/classifiers/unity.py | dessn/Pippin | 0a62eafc40a3ab290d4dbed809c51f69862f99db | [
"MIT"
] | 7 | 2019-02-19T18:18:59.000Z | 2021-03-29T23:17:59.000Z | import os
import shutil
import subprocess
import pandas as pd
from astropy.io import fits
import numpy as np
from pippin.classifiers.classifier import Classifier
from pippin.config import chown_dir, mkdirs
from pippin.task import Task
class UnityClassifier(Classifier):
    """ Classification task for the SuperNNova classifier.
    CONFIGURATION
    =============
    CLASSIFICATION:
        label:
            MASK_SIM: mask  # partial match
            MASK_FIT: mask  # partial match
            MASK: mask  # partial match
            MODE: predict
    OUTPUTS:
    ========
        name : name given in the yml
        output_dir: top level output directory
        prob_column_name: name of the column to get probabilities out of
        predictions_filename: location of csv filename with id/probs
    """

    def __init__(self, name, output_dir, config, dependencies, mode, options, index=0, model_name=None):
        super().__init__(name, output_dir, config, dependencies, mode, options, index=index, model_name=model_name)
        self.output_file = None
        self.passed = False
        self.num_jobs = 1  # This is the default. Can get this from options if needed.
        # Unity assigns probability 1.0 to every supernova id and writes the
        # result to this CSV, advertised downstream via self.output.
        self.output_file = os.path.join(self.output_dir, "predictions.csv")
        self.output.update({"predictions_filename": self.output_file})

    def get_unique_name(self):
        # A single shared name: the output never depends on configuration.
        return "UNITY"

    def classify(self):
        """Write a predictions.csv giving probability 1.0 to every SNID
        found in the simulation photometry; returns True on success."""
        new_hash = self.get_hash_from_string(self.name)
        if self._check_regenerate(new_hash):
            shutil.rmtree(self.output_dir, ignore_errors=True)
            mkdirs(self.output_dir)
            try:
                name = self.get_prob_column_name()
                cid = "CID"
                s = self.get_simulation_dependency()
                df = None
                phot_dir = s.output["photometry_dirs"][self.index]
                headers = [os.path.join(phot_dir, a) for a in os.listdir(phot_dir) if "HEAD" in a]
                if len(headers) == 0:
                    # Fallback: no HEAD FITS files, so scrape SNIDs out of
                    # whatever text files live in the photometry directory.
                    self.logger.warning(f"No HEAD fits files found in {phot_dir}! Going to do it manually, this may not work.")
                    cmd = "grep --exclude-dir=* SNID: * | awk -F ':' '{print $3}'"
                    self.logger.debug(f"Running command {cmd}")
                    process = subprocess.run(cmd, capture_output=True, cwd=phot_dir, shell=True)
                    output = process.stdout.decode("ascii").split("\n")
                    output = [x for x in output if x]
                    snid = [x.strip() for x in output]
                    df = pd.DataFrame({cid: snid, name: np.ones(len(snid))})
                    df.drop_duplicates(subset=cid, inplace=True)
                else:
                    # Normal path: collect SNIDs from every HEAD FITS table.
                    for h in headers:
                        with fits.open(h) as hdul:
                            data = hdul[1].data
                            snid = np.array(data.field("SNID"))
                            dataframe = pd.DataFrame({cid: snid, name: np.ones(snid.shape)})
                            dataframe[cid] = dataframe[cid].apply(str)
                            dataframe[cid] = dataframe[cid].str.strip()
                            if df is None:
                                df = dataframe
                            else:
                                df = pd.concat([df, dataframe])
                    df.drop_duplicates(subset=cid, inplace=True)
                self.logger.info(f"Saving probabilities to {self.output_file}")
                df.to_csv(self.output_file, index=False, float_format="%0.4f")
                chown_dir(self.output_dir)
                # Done-file marker drives _check_completion.
                with open(self.done_file, "w") as f:
                    f.write("SUCCESS")
                self.save_new_hash(new_hash)
            except Exception as e:
                self.logger.exception(e, exc_info=True)
                self.passed = False
                with open(self.done_file, "w") as f:
                    f.write("FAILED")
                return False
        else:
            self.should_be_done()
        self.passed = True
        return True

    def _check_completion(self, squeue):
        # Re-read the done file on restart so task state survives reruns.
        if not self.passed:
            if os.path.exists(self.done_file):
                with open(self.done_file) as f:
                    self.passed = "SUCCESS" in f.read()
        return Task.FINISHED_SUCCESS if self.passed else Task.FINISHED_FAILURE

    def train(self):
        # Training is meaningless for a constant classifier; reuse classify.
        return self.classify()

    def predict(self):
        return self.classify()

    @staticmethod
    def get_requirements(config):
        # (needs simulation, needs light-curve fit) -> only the simulation.
        return True, False
| 38.166667 | 127 | 0.554803 | 544 | 4,580 | 4.536765 | 0.347426 | 0.040519 | 0.028363 | 0.019449 | 0.122366 | 0.114263 | 0.114263 | 0.062399 | 0.024311 | 0.024311 | 0 | 0.002348 | 0.349127 | 4,580 | 119 | 128 | 38.487395 | 0.825562 | 0.116376 | 0 | 0.13253 | 0 | 0 | 0.075937 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.084337 | false | 0.072289 | 0.108434 | 0.048193 | 0.289157 | 0.012048 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
cc95285c97c4e2dc79a1d7498eb1fe063d70402a | 1,069 | py | Python | Resene naloge/euler26.py | CadezDavid/ProjectEuler | 9e11aa5782fb600c98eba9e04766b3bd79acea0e | [
"MIT"
] | null | null | null | Resene naloge/euler26.py | CadezDavid/ProjectEuler | 9e11aa5782fb600c98eba9e04766b3bd79acea0e | [
"MIT"
] | null | null | null | Resene naloge/euler26.py | CadezDavid/ProjectEuler | 9e11aa5782fb600c98eba9e04766b3bd79acea0e | [
"MIT"
] | null | null | null | def decimalni_zapis(deljenec, delitelj):
memo = set()
decimalni_zapis = ''
while (deljenec, delitelj) not in memo:
if deljenec % delitelj == 0:
decimalni_zapis += str(deljenec // delitelj)
return decimalni_zapis, deljenec, delitelj
elif deljenec < delitelj:
decimalni_zapis += '0'
memo.add((deljenec, delitelj))
deljenec *= 10
else:
decimalni_zapis += str(deljenec // delitelj)
memo.add((deljenec, delitelj))
deljenec = deljenec % delitelj * 10
return decimalni_zapis
# def je_prastevilo(n):
# if n < 2 or n == 4 or n == 6 or n == 8:
# return False
# i = 3
# while i*i <= n:
# if n % i == 0 or n % 2 == 0:
# return False
# i += 2
# return True
# Length of the expansion record for every divisor of 1/i below 1000.
slovar = {i: len(decimalni_zapis(1, i)) for i in range(2, 1000)}
# for stevilo in range(1, 10 ** 6):
#     if len(decimalni_zapis(1, stevilo)) == stevilo:
#         print(stevilo)
print(max(slovar, key=slovar.get))
cc95fbbb6d4d0db59f783e3386e87ef44d181eec | 2,931 | py | Python | wikipya/drivers.py | jDan735/wikipya | d9f9b18d92f2e7c2a0a319214a5da3f33bd1162d | [
"MIT"
] | 2 | 2021-08-02T21:49:35.000Z | 2021-08-25T14:15:13.000Z | wikipya/drivers.py | jDan735/wikipya | d9f9b18d92f2e7c2a0a319214a5da3f33bd1162d | [
"MIT"
] | 18 | 2021-11-14T08:07:43.000Z | 2022-01-02T22:31:35.000Z | wikipya/drivers.py | jDan735/wikipya | d9f9b18d92f2e7c2a0a319214a5da3f33bd1162d | [
"MIT"
] | null | null | null | try:
import aiohttp
except:
pass
try:
import httpx
except:
pass
import time
import json
from .exceptions import ParseError
class JSONObject:
    """Expose the keys of a JSON mapping as instance attributes."""

    def __init__(self, dict):
        self.add(dict)

    def add(self, dict):
        # Merge every key of the mapping into the instance namespace.
        self.__dict__.update(dict)
class BaseDriver:
    """Shared configuration holder for the concrete HTTP drivers."""

    def __init__(self, url="example.com", timeout=5, params=()):
        self.url = url
        self.timeout = timeout
        self.params = params

    async def get(self, url, params=(), timeout=None):
        # Concrete drivers must implement the actual HTTP GET.
        raise NotImplementedError
class AiohttpDriver(BaseDriver):
    """aiohttp-based driver returning parsed JSON or raw HTML."""

    def _merge_params(self, params):
        """Merge call-site params over the instance defaults.

        aiohttp expects None (not an empty dict) when nothing is sent.
        NOTE(review): when no call-site params are given, self.params is
        dropped too (preserves original behavior) -- confirm intent.
        """
        if not params:
            return None
        return {**self.params, **params}

    async def get(self, url=None, timeout=None, debug=False, **params):
        """GET `url` (or self.url) and decode the JSON body into
        JSONObject attributes.

        Raises ParseError when the payload carries an ``error`` member.
        """
        query = self._merge_params(params)
        async with aiohttp.ClientSession() as session:
            start = time.time()
            async with session.get(
                    url or self.url, params=query,
                    timeout=timeout or self.timeout) as response:
                text = await response.text()
                if debug:
                    print(response.url)
                    print(time.time() - start)
                payload = json.loads(text, object_hook=JSONObject)
                # Lists carry no top-level "error" member; only check objects.
                if not isinstance(payload, list) \
                        and payload.__dict__.get("error") is not None:
                    raise ParseError(f"{payload.error.code}: {payload.error.info}")
                return payload

    async def get_html(self, url=None, timeout=None, debug=False, **params):
        """GET `url` (or self.url) and return
        (status code, body text, final URL)."""
        query = self._merge_params(params)
        async with aiohttp.ClientSession() as session:
            start = time.time()
            async with session.get(
                    url or self.url, params=query,
                    timeout=timeout or self.timeout) as response:
                text = await response.text()
                if debug:
                    print(response.url)
                    print(time.time() - start)
                return response.status, text, response.url
class HttpxDriver(BaseDriver):
    """httpx-based driver returning parsed JSON or raw HTML."""

    async def get(self, url=None, timeout=None, debug=False, **params):
        """GET `url` (or self.url) and decode the JSON body into
        JSONObject attributes.

        Raises ParseError when the payload carries an ``error`` member.
        """
        async with httpx.AsyncClient(timeout=timeout) as client:
            res = await client.get(url or self.url,
                                   params={**self.params, **params})
            js = json.loads(res.text, object_hook=JSONObject)
            # FIX: reuse the already-parsed payload; the previous
            # res.json() call parsed the whole body a second time.
            if not isinstance(js, list) and js.__dict__.get("error"):
                raise ParseError(f"{js.error.code}: {js.error.info}")
            return js

    async def get_html(self, url=None, timeout=None, debug=False, **params):
        """GET `url` (or self.url) and return
        (status code, body text, final URL).

        NOTE(review): unlike get(), self.params is not merged here
        (preserves original behavior) -- confirm intent.
        """
        async with httpx.AsyncClient(timeout=timeout) as client:
            res = await client.get(url or self.url, params=params)
            return res.status_code, res.text, res.url
| 28.182692 | 76 | 0.551689 | 335 | 2,931 | 4.776119 | 0.21194 | 0.048125 | 0.034375 | 0.045 | 0.65875 | 0.615 | 0.615 | 0.61125 | 0.61125 | 0.61125 | 0 | 0.001546 | 0.337769 | 2,931 | 103 | 77 | 28.456311 | 0.822772 | 0.004435 | 0 | 0.608108 | 0 | 0 | 0.02919 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.040541 | false | 0.040541 | 0.067568 | 0 | 0.216216 | 0.054054 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
cc98ec601b89e946984916168b2410bb54ca7c7c | 889 | py | Python | fts/models.py | filwaitman/django-fts | af9f10d760e543288c21a8f4239288c2e415b9e9 | [
"BSD-3-Clause"
] | null | null | null | fts/models.py | filwaitman/django-fts | af9f10d760e543288c21a8f4239288c2e415b9e9 | [
"BSD-3-Clause"
] | 9 | 2020-02-11T23:38:20.000Z | 2022-03-11T23:16:29.000Z | 3rdparty/odeoncg-django-fts-odeon-9ea3a64/fts/models.py | cltrudeau/django-yacon | d462c88cf98bf8eef50a0696b265fa28dfdb40eb | [
"MIT"
] | 2 | 2017-09-10T11:27:51.000Z | 2019-12-28T00:12:58.000Z | """
Full Text Search Framework
"""
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from fts.settings import *
if FTS_BACKEND.startswith('simple://'):
    # Models backing the "simple" full-text-search backend: a unique word
    # table plus a generic index linking words (with a weight) to any model
    # instance via Django's contenttypes framework.
    # NOTE(review): Python-2-era Django style (__unicode__, ForeignKey
    # without on_delete) -- keep as is unless the project is upgraded.
    class IndexWord(models.Model):
        # Unique, indexed token extracted from indexed content.
        word = models.CharField(unique=True, db_index=True, blank=False, max_length=100)

        def __unicode__(self):
            return u"%s" % (self.word)

    class Index(models.Model):
        # One weighted occurrence of a word inside an arbitrary object.
        word = models.ForeignKey(IndexWord)
        weight = models.IntegerField()
        content_type = models.ForeignKey(ContentType)
        object_id = models.PositiveIntegerField()
        content_object = generic.GenericForeignKey('content_type', 'object_id')

        def __unicode__(self):
            return u'%s [%s]' % (self.content_object, self.word.word)
| 34.192308 | 89 | 0.650169 | 97 | 889 | 5.783505 | 0.484536 | 0.053476 | 0.060606 | 0.103387 | 0.078431 | 0.078431 | 0 | 0 | 0 | 0 | 0 | 0.004471 | 0.245219 | 889 | 25 | 90 | 35.56 | 0.831595 | 0.029246 | 0 | 0.117647 | 0 | 0 | 0.046988 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.117647 | false | 0 | 0.235294 | 0.117647 | 0.941176 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
cc98fd3cd69cc032a3010b1c76cbca399e3ae91c | 554 | py | Python | experiencia/models.py | JVacca12/FIRST | e3906209cae1198e1fbda4d00bc0a906e8294a69 | [
"MIT"
] | null | null | null | experiencia/models.py | JVacca12/FIRST | e3906209cae1198e1fbda4d00bc0a906e8294a69 | [
"MIT"
] | null | null | null | experiencia/models.py | JVacca12/FIRST | e3906209cae1198e1fbda4d00bc0a906e8294a69 | [
"MIT"
] | null | null | null | from django.db import models
from users.models import User
# Create your models here.
class Experiencia(models.Model):
    """Work-experience entry attached to a User."""
    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='experiencia')
    cargo = models.CharField(max_length=120)   # job title
    fecha_inicio = models.DateTimeField()      # start date
    # End date; nullable -- presumably null while the position is ongoing
    # (TODO confirm with callers).
    fecha_fin = models.DateTimeField(null=True)
    empresa = models.CharField(max_length=50)  # company name

    def __str__(self):
        """Return company and first_name and last_name."""
        return f'{self.user.first_name} {self.user.last_name} | {self.empresa}'
| 34.625 | 88 | 0.729242 | 74 | 554 | 5.27027 | 0.554054 | 0.076923 | 0.092308 | 0.123077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01073 | 0.158845 | 554 | 15 | 89 | 36.933333 | 0.82618 | 0.126354 | 0 | 0 | 0 | 0 | 0.150628 | 0.089958 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.2 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
cc9dceb59760562931da720543a6d8a6e4bd9eeb | 331 | py | Python | Task/Gray-code/Python/gray-code-1.py | LaudateCorpus1/RosettaCodeData | 9ad63ea473a958506c041077f1d810c0c7c8c18d | [
"Info-ZIP"
] | 1 | 2018-11-09T22:08:38.000Z | 2018-11-09T22:08:38.000Z | Task/Gray-code/Python/gray-code-1.py | seanwallawalla-forks/RosettaCodeData | 9ad63ea473a958506c041077f1d810c0c7c8c18d | [
"Info-ZIP"
] | null | null | null | Task/Gray-code/Python/gray-code-1.py | seanwallawalla-forks/RosettaCodeData | 9ad63ea473a958506c041077f1d810c0c7c8c18d | [
"Info-ZIP"
] | 1 | 2018-11-09T22:08:40.000Z | 2018-11-09T22:08:40.000Z | >>> def int2bin(n):
'From positive integer to list of binary bits, msb at index 0'
if n:
bits = []
while n:
n,remainder = divmod(n, 2)
bits.insert(0, remainder)
return bits
else: return [0]
>>> def bin2int(bits):
'From binary bits, msb at index 0 to integer'
i = 0
for bit in bits:
i = i * 2 + bit
return i
| 18.388889 | 63 | 0.622356 | 58 | 331 | 3.551724 | 0.482759 | 0.097087 | 0.126214 | 0.145631 | 0.203884 | 0.203884 | 0 | 0 | 0 | 0 | 0 | 0.036585 | 0.256798 | 331 | 17 | 64 | 19.470588 | 0.800813 | 0 | 0 | 0 | 0 | 0 | 0.311178 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
ccae49d18a7c7f2cb30e791d9de0124bc90b8598 | 341 | py | Python | cms_people/urls/people.py | python-spain/djangocms-people | 406a64b49ed152d615e421b47aef7366083b5376 | [
"MIT"
] | 1 | 2016-06-17T23:45:07.000Z | 2016-06-17T23:45:07.000Z | cms_people/urls/people.py | python-spain/djangocms-people | 406a64b49ed152d615e421b47aef7366083b5376 | [
"MIT"
] | null | null | null | cms_people/urls/people.py | python-spain/djangocms-people | 406a64b49ed152d615e421b47aef7366083b5376 | [
"MIT"
] | null | null | null | from django.conf.urls import url
from cms_people.views.people import PersonView, PeopleView, PeopleMapResultView
# Regex routes; raw strings are the idiomatic form for regex patterns
# (values unchanged -- none of these contain escapes).  Order matters:
# the catch-all username pattern must stay after people_map_result.
urlpatterns = [
    url(r'^people_map_result$', PeopleMapResultView.as_view(), name='people_map_result'),
    url(r'^(?P<username>.+)$', PersonView.as_view(), name='person'),
    url(r'^$', PeopleView.as_view(), name='people'),
]
ccc8a7055151febfd87f8a6c4b69bb3cd3d03357 | 525 | py | Python | DAO/Connection.py | rennancockles/gigapy | 4fb0cd97e689c6460254a466a7a8d9137c2392b1 | [
"MIT"
] | null | null | null | DAO/Connection.py | rennancockles/gigapy | 4fb0cd97e689c6460254a466a7a8d9137c2392b1 | [
"MIT"
] | null | null | null | DAO/Connection.py | rennancockles/gigapy | 4fb0cd97e689c6460254a466a7a8d9137c2392b1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import MySQLdb
class Connection(object):
    """Thin wrapper around a MySQLdb connection and its cursor.

    Connection settings are now parameters with the previous hard-coded
    values as defaults, so existing callers keep working unchanged.
    """

    def __init__(self, host="localhost", user="root", passwd="123", db="gigapy"):
        # SECURITY: default credentials are hard-coded for convenience only;
        # pass real credentials (e.g. from the environment) in production.
        self.db = MySQLdb.connect(host=host, user=user, passwd=passwd, db=db)
        self.cursor = self.db.cursor()

    def close_connection(self):
        """Release the cursor, then close the underlying connection."""
        self.cursor.close()
        self.db.close()


conn = Connection()
| 23.863636 | 83 | 0.508571 | 53 | 525 | 4.943396 | 0.603774 | 0.068702 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012232 | 0.377143 | 525 | 21 | 84 | 25 | 0.788991 | 0.190476 | 0 | 0 | 0 | 0 | 0.052506 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0.083333 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
aeaaff03b3c3037657036c9589f3d3b3cbf242f5 | 379 | py | Python | app/test/pickle-dump.py | sappachok/django-anaconda | 1ffd33ded759f622b6db23a3550a898b62350403 | [
"MIT"
] | null | null | null | app/test/pickle-dump.py | sappachok/django-anaconda | 1ffd33ded759f622b6db23a3550a898b62350403 | [
"MIT"
] | 7 | 2019-12-06T05:34:28.000Z | 2021-06-10T18:25:17.000Z | app/test/pickle-dump.py | sappachok/django-datasci | 1ffd33ded759f622b6db23a3550a898b62350403 | [
"MIT"
] | null | null | null |
import pickle
import pickleout

# Sample payload kept for the commented-out experiments below.
example_dict = {1:"6",2:"2",3:"f", 4:"goo"}

pickle_out = open("dict.pickle","wb")
# NOTE(review): globals() contains module objects (pickle, pickleout),
# which are not picklable -- this dump is expected to raise at runtime;
# confirm whether example_dict (commented out below) was the real intent.
pickle.dump(globals(), pickle_out, protocol=pickle.HIGHEST_PROTOCOL)
#pickle.dump(example_dict, pickle_out)
#allvar = locals()
#pickle.dump(allvar, pickle_out)
pickle_out.close()

#for i in locals():
#    print(i)
#    x = locals()
#print()

#pickleout.output()
aeb7dc1515707a50d032456897d2a7ba0070a944 | 25,924 | py | Python | lib/python/data/waves.py | HELIO-HFC/RABAT3 | 0a399336071e38e0f3b39d1bc939a5f589da6ab8 | [
"MIT"
] | null | null | null | lib/python/data/waves.py | HELIO-HFC/RABAT3 | 0a399336071e38e0f3b39d1bc939a5f589da6ab8 | [
"MIT"
] | null | null | null | lib/python/data/waves.py | HELIO-HFC/RABAT3 | 0a399336071e38e0f3b39d1bc939a5f589da6ab8 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# -*- coding: latin-1 -*-
"""
Python library to get and read Wind/Waves data.
@author: X.Bonnin (LESIA)
"""
__author__="Xavier Bonnin"
__date__="08-MAY-2013"
__version__ = "1.0.1"
import os, sys
import re, urllib2
from urlparse import urlparse
import time, subprocess
import numpy as np
from copy import deepcopy
from datetime import datetime
from PIL import Image
from scipy.misc import bytescale
from scipy.interpolate import barycentric_interpolate
import logging
CURRENT_DIRECTORY = os.getcwd()
#Distant servers where data are stored
GSFC_URL = "ftp://stereowaves.gsfc.nasa.gov" # NASA/GSFC
LESIA_URL = "ftp://sorbet.obspm.fr" #Observatoire de Paris/LESIA
# Date and time formats
WAVES_TFORMAT = "%Y%m%d"
# Min val
MIN_VAL=1.0e-30
# Wind/Waves receiver parameters
RAD1_BANDWIDTH=3.0 #kHz
RAD1_INT_TIME_S=154 #ms
RAD1_INT_TIME_Z=308 #ms
RAD1_FREQ_STEP=4.0 #kHz
RAD1_FREQ_MIN=20.0 #kHz
RAD1_FREQ_NUM=256
RAD2_BANDWIDTH=20.0 #kHz
RAD2_INT_TIME_S=20 #ms
RAD2_INT_TIME_Z=40 #ms
RAD2_FREQ_STEP=50.0 #kHz
RAD2_FREQ_MIN=1075.0 #kHz
RAD2_FREQ_NUM=256
TNR_BANDWIDTH=None #kHz --> TBD
TNR_INT_TIME=None #ms --> TBD
TNR_FREQ_STEP=None #kHz
TNR_FREQ_MIN=4.0 #kHz
TNR_FREQ_NUM=96
# logger name
WAVES_LOGGER="waves"
# Wind/Waves class
class wind():
    def __init__(self,
                 provider="gsfc",
                 receiver="rad2",
                 dataset="l2_60s",
                 username=None,
                 password=None,
                 verbose=True):
        """Configure a Wind/Waves data accessor.

        provider : data server id, "gsfc" or "lesia".
        receiver : "rad1", "rad2" or "tnr".
        dataset  : dataset id (e.g. "l2_60s", "l2_hres", "l3_df", ...).
        username/password : optional FTP credentials for downloads.
        verbose  : forwarded to the logging setup.
        """
        self.provider = provider
        self.observatory = "Wind"
        self.instrument = "Waves"
        self.receiver = receiver
        self.dataset = dataset
        self.username = username
        self.password = password

        # Setup the logging: the module-level LOG handle is (re)bound here
        # and used by every other method of the class.
        setup_logging(filename=None, quiet=False, verbose=verbose)
        global LOG
        LOG = logging.getLogger(WAVES_LOGGER)
def get_filename(self,date,
receiver=None,
provider=None,
dataset=None,
verbose=True):
"""
This method returns the name of
the Waves file providing the
date of observation.
"""
if not isinstance(date,datetime):
LOG.error("Input date must be a datetime object!")
return None
if (provider is None):
provider = self.provider
if (receiver is None):
receiver = self.receiver
if (dataset is None):
dataset=self.dataset
pro = provider.lower()
rec = receiver.lower()
ds = dataset.lower()
if (pro == "gsfc"):
filename = date.strftime(WAVES_TFORMAT)
if (rec == "rad1"):
filename += ".R1.Z"
elif (rec == "rad2"):
filename += ".R2.Z"
elif (rec == "tnr"):
filename += ".tnr.Z"
else:
LOG.error("Unknown receiver!")
return None
elif (pro == "lesia"):
filename = date.strftime(WAVES_TFORMAT)
if (rec == "rad1"):
if (ds == "l2_hres"):
filename="WIN_RAD1_"+filename+".B3E"
elif (ds == "l2_60s"):
filename="WIN_RAD1_60S_"+filename+".B3E"
elif (ds == "l3_df"):
filename="WIN_RAD1_DF_"+filename+".B3E"
elif (ds == "l3_gp"):
filename="WIN_RAD1_DF_"+filename+".B3E"
else:
LOG.error("Unknown data set!")
return None
elif (rec == "rad2"):
if (ds == "l2_hres"):
filename="WIN_RAD2_"+filename+".B3E"
elif (ds == "l2_60s"):
filename="WIN_RAD2_60S_"+filename+".B3E"
elif (ds == "l3_sfu"):
filename="WIN_RAD2_SFU_"+filename+".B3E"
else:
LOG.error("Unknown data set!")
return None
elif (rec == "tnr"):
LOG.warning("TNR data set not available yet!")
return None
else:
LOG.error("Unknown data set!")
return None
else:
LOG.error("Unknown data provider!")
return None
return filename
def get_date(self,filename,
provider=None):
"""
This method returns the date of
observation of a given file.
"""
if (provider is None):
provider = self.provider
pro = provider.lower()
if (pro == "gsfc"):
date = datetime.strptime(os.path.basename(filename)[0:8],WAVES_TFORMAT)
elif (pro == "lesia"):
date = datetime.strptime(os.path.basename(filename.split("_")[-1][0:8]),WAVES_TFORMAT)
else:
LOG.warning("Unknown data provider!")
return None
return date
def get_rec(self,filename,
provider=None):
"""
This method returns the receiver name
providing the name of the file.
"""
if (provider is None):
provider = self.provider
pro=provider.lower()
basename = os.path.basename(filename)
if (pro == "gsfc"):
if ((re.search("\d{8}.R1",basename)) or
(re.search("\d{8}.R1.Z",basename))):
return "rad1"
elif ((re.search("\d{8}.R2",basename)) or
(re.search("\d{8}.R2.Z",basename))):
return "rad2"
elif ((re.search("\d{8}.tnr",basename)) or
(re.search("\d{8}.tnr.Z",basename))):
return "tnr"
else:
LOG.error("Wrong GSFC Waves filename!")
return None
elif (pro == "lesia"):
return basename.split("_")[1].lower()
else:
LOG.error("Unknown data provider!")
return None
def get_dataset(self,filename,
provider=None):
"""
This method returns the dataset
for a given filename.
"""
if (provider is None):
provider=self.provider
pro = provider.lower()
basename = os.path.basename(filename)
if (pro == "gsfc"):
return "l2_60s"
elif (pro == "lesia"):
if (re.search("WIN_\w{3,4}_\d{8}.B3E",basename)):
return "l2_hres"
elif (re.search("WIN_\w{3,4}_60S_\d{8}.B3E",basename)):
return "l2_60s"
elif (re.search("WIN_\w{3,4}_DF_\d{8}.B3E",basename)):
return "l3_df"
elif (re.search("WIN_\w{3,4}_GP_\d{8}.B3E",basename)):
return "l3_gp"
elif (re.search("WIN_\w{3,4}_SFU_\d{8}.B3E",basename)):
return "l3_sfu"
else:
LOG.error("Unknown type of file!")
return None
else:
LOG.error("Unknown data provider!")
return None
def get_url(self, date=None,
            receiver=None,
            filename=None,
            provider=None,
            dataset=None,
            verbose=True):
    """Build the download URL of a Waves data file.

    The basename comes either from an explicit `filename` or from the
    (date, receiver) pair via get_filename(); provider and dataset
    default to the instance attributes.

    Returns
    -------
    str or None
        Full URL of the remote file, or None on an unknown
        provider/receiver/dataset.
    """
    if provider is None:
        provider = self.provider
    pro = provider.lower()
    if dataset is None:
        dataset = self.dataset
    ds = dataset.lower()
    if filename is None:
        if receiver is None:
            receiver = self.receiver
        rec = receiver.lower()
        basename = self.get_filename(date, receiver=rec,
                                     provider=provider,
                                     dataset=ds,
                                     verbose=verbose)
    else:
        rec = self.get_rec(filename)
        basename = os.path.basename(filename)
    if pro == "gsfc":
        # Per-receiver subdirectories on the GSFC server.
        gsfc_paths = {"rad1": "/wind_rad1/rad1a",
                      "rad2": "/wind_rad2/rad2a",
                      "tnr": "/wind_tnr/tnra"}
        if rec not in gsfc_paths:
            LOG.error("Unknown receiver!")
            return None
        url = GSFC_URL + gsfc_paths[rec]
    elif pro == "lesia":
        # Per-dataset subdirectories below the receiver directory.
        lesia_paths = {"l2_hres": "/l2/h_res",
                       "l2_60s": "/l2/average",
                       "l3_df": "/l3/df",
                       "l3_gp": "/l3/gp",
                       "l3_sfu": "/l3/sfu"}
        if ds not in lesia_paths:
            LOG.error("Unknown dataset!")
            return None
        url = LESIA_URL + "/WindServer/Data/WIND_Data/CDPP/%s" % (rec)
        url += lesia_paths[ds]
    else:
        LOG.error("Unknown data provider!")
        return None
    return url + "/" + basename
def get_file(self, date=None,
             receiver=None,
             filename=None,
             dataset=None,
             provider=None,
             username=None,
             password=None,
             data_directory=CURRENT_DIRECTORY,
             verbose=True):
    """Download a Waves data file and return its local path.

    Credentials default to the instance's username/password; the URL is
    resolved by get_url().  Returns None when the URL cannot be built.
    """
    if username is None:
        username = self.username
    if password is None:
        password = self.password
    url = self.get_url(date=date,
                       receiver=receiver,
                       provider=provider,
                       dataset=dataset,
                       filename=filename)
    # Guard clause: nothing to fetch if the URL could not be resolved.
    if url is None:
        return None
    return download_file(url,
                         username=username, password=password,
                         target_directory=data_directory,
                         verbose=verbose)
def read_file(self, filename,
              verbose=True):
    """Read a Waves data file.

    Only NASA/GSFC ASCII files ("YYYYMMDD.R1", ".R2", ".tnr", optionally
    ".Z"-compressed) are currently supported.

    Returns
    -------
    The array returned by read_gsfc(), or None for unrecognized names
    (the original fell off the end, returning None implicitly).
    """
    basename = os.path.basename(filename)
    # Raw strings with an escaped dot replace the original non-raw
    # patterns; the ".Z" variants are substrings already matched by the
    # base patterns, so two searches suffice.
    if re.search(r"\d{8}\.R\d", basename) or re.search(r"\d{8}\.tnr", basename):
        return self.read_gsfc(filename)
    return None
def get_data(self, filename=None,
             date=None, receiver=None,
             data_directory=None,
             download_file=False,
             delete_file=False,
             verbose=True,
             prep=False,
             interpolate=False,
             dB=False):
    """
    This method returns the Waves data.

    If `filename` is None it is derived from (date, receiver).  When the
    file is missing locally it is optionally fetched (download_file=True)
    and optionally removed afterwards (delete_file=True).  With
    prep=True the result is passed through prep_data() first.

    NOTE(review): the boolean `download_file` parameter shadows the
    module-level download_file() helper inside this body; the actual
    download goes through self.get_file(), so behavior is unaffected.
    """
    if (filename is None):
        filename = self.get_filename(date, receiver=receiver,
                                     verbose=verbose)
    if (data_directory is None):
        # Default to the directory part of the (possibly absolute) filename.
        data_directory = os.path.dirname(filename)
    filename = os.path.basename(filename)
    filepath = os.path.join(data_directory, filename)
    if not (os.path.isfile(filepath)):
        if (verbose): print "%s not found!" % filepath
        if (download_file):
            filepath = self.get_file(date=date, receiver=receiver,
                                     data_directory=data_directory,
                                     filename=filename, verbose=verbose)
            if (filepath is None): return None
        else:
            return None
    data = self.read_file(filepath, verbose=verbose)
    if (delete_file):
        # Clean up the (possibly just downloaded) local copy.
        if (os.path.isfile(filepath)):
            os.remove(filepath)
            if (verbose): print "%s deleted" % filepath
    if (prep):
        return self.prep_data(data, dB=dB, interpolate=interpolate)
    else:
        return data
def read_lesia(self, filename):
    """
    This method reads the ObsParis/LESIA waves data file.

    NOTE(review): not implemented yet -- the parsing code below is
    commented out, so this method only resolves the receiver and
    dataset and implicitly returns None.
    """
    rec = self.get_rec(filename, provider="lesia")
    ds = self.get_dataset(filename, provider="lesia")
    #with (open(filename,'rb') as frb):
    #    content = frb.read()
    #if (rec == "rad1") and (ds == "l2_hres"):
def read_gsfc(self, filename):
    """Read a NASA/GSFC Waves ASCII data file (optionally .Z compressed).

    Each line of the file holds one frequency channel: the per-minute
    intensities followed by a trailing background value, hence the
    (ntimes + 1, nfreq) array below.

    Returns
    -------
    spectrum or None
        A filled spectrum object, or None when decompression fails, the
        channel count does not match the receiver, or the extension is
        unknown.
    """
    basename = os.path.basename(filename)
    if basename.endswith("Z"):
        ext = basename.split(".")[-2].lower()
        # Decompress on the fly rather than via a temporary file.
        cmd = ["gzip", "-dc", filename]
        gzip_process = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                                        stdout=subprocess.PIPE)
        output, error = gzip_process.communicate()
        if gzip_process.wait() == 0:
            file_content = output.split("\n")[0:-1]
        else:
            # BUG FIX: the original left file_content undefined here,
            # raising NameError further down when gzip failed.
            return None
    else:
        ext = basename.split(".")[-1].lower()
        # BUG FIX: the original called open(file, 'r'), passing the
        # builtin `file` instead of the `filename` argument.
        with open(filename, 'r') as fr:
            file_content = fr.read().split("\n")[0:-1]
    nf = len(file_content)
    nt = len(file_content[0].split())
    array = np.zeros((nt, nf), dtype=np.float32)
    for i, line in enumerate(file_content):
        array[:, i] = np.float32(line.split())
    # The last value of each channel is the background: drop it from
    # the time axis.
    nt -= 1
    dt = 60.0  # sec (60-second averaged product)
    time = dt * np.arange(nt, dtype=np.float32)
    if ext == "r1":
        receiver = "rad1"
        bandwidth = RAD1_BANDWIDTH
        integration_time = RAD1_INT_TIME_S
        if nf != RAD1_FREQ_NUM:
            return None
        df = RAD1_FREQ_STEP
        freq = RAD1_FREQ_STEP * np.arange(nf, dtype=np.float32) + RAD1_FREQ_MIN
    elif ext == "r2":
        receiver = "rad2"
        bandwidth = RAD2_BANDWIDTH
        integration_time = RAD2_INT_TIME_S
        if nf != RAD2_FREQ_NUM:
            return None
        df = RAD2_FREQ_STEP
        freq = RAD2_FREQ_STEP * np.arange(nf, dtype=np.float32) + RAD2_FREQ_MIN
    elif ext == "tnr":
        receiver = "tnr"
        bandwidth = TNR_BANDWIDTH
        integration_time = TNR_INT_TIME_S
        if nf != TNR_FREQ_NUM:
            return None
        df = TNR_FREQ_STEP
        # TNR channels are log-spaced.  BUG FIX: the original called a
        # bare `power`, defined only under a numpy star-import.
        freq = np.power(10, np.arange(nf, dtype=np.float32) * 0.0188144
                        + np.log10(TNR_FREQ_MIN))
    else:
        return None
    date = self.get_date(filename)
    hmin, mmin, smin = split_time(min(time / 3600.))
    hmax, mmax, smax = split_time(max(time / 3600.))
    date_obs = datetime(day=date.day, year=date.year, month=date.month,
                        hour=hmin, minute=mmin, second=smin)
    date_end = datetime(day=date.day, year=date.year, month=date.month,
                        hour=hmax, minute=mmax, second=smax)
    data = spectrum(observatory=self.observatory,
                    instrument=self.instrument,
                    receiver=receiver,
                    intensity=array[0:-1, :],
                    background=array[-1, :],
                    time=time, frequency=freq,
                    date_obs=date_obs,
                    date_end=date_end,
                    naxis=[nt, nf],
                    cdelt=[dt, df],
                    bandwidth=bandwidth,
                    integration_time=integration_time,
                    intensity_units="Intensity above background",
                    comment="60 sec. average data produced by GSFC (NASA).")
    return data
def prep_data(self, data,
              quantile=0.1, nbins=1000,
              interpolate=False, dB=False,
              substract_background=False):
    """Preprocess a spectrum (SNR, optional background subtraction/dB).

    Parameters
    ----------
    data : spectrum
        Input dynamic spectrum (not modified).
    quantile, nbins :
        Kept for interface compatibility (currently unused).
    interpolate : bool
        Interpolate missing (zero) background channels (GSFC only).
    dB : bool
        Convert intensity, SNR and background to decibels.
    substract_background : bool
        Subtract the background from the intensity.

    Returns
    -------
    spectrum
        A deep copy of `data` with updated "intensity", "background",
        "snr" and "intensity_units" parameters.
    """
    prov = self.provider
    prep_data = deepcopy(data)
    # Work on a copy: the original operated on the input's own array,
    # so background subtraction silently mutated the caller's data.
    array = np.array(data.get_parameter("intensity"))
    nt = data.naxis[0]
    nf = data.naxis[1]
    frequency = data.get_parameter("frequency")
    background = data.get_parameter("background")
    int_time = data.get_parameter("integration_time")
    bandwidth = data.get_parameter("bandwidth")
    # Radiometric uncertainty of an averaging receiver.
    rms = 1. / np.sqrt(int_time * bandwidth)
    # BUG FIX: snr and intensity_units were only assigned on some
    # branches, raising NameError for non-GSFC providers or when both
    # dB and substract_background were False.
    snr = np.zeros((nt, nf), dtype=np.float32)
    intensity_units = data.get_parameter("intensity_units")
    if prov == "gsfc":
        if interpolate:
            # Fill missing (zero) background channels by interpolation.
            where_ok = np.where(background > 0.0)
            background = barycentric_interpolate(frequency[where_ok],
                                                 background[where_ok],
                                                 frequency)
        for j in range(nf):
            array_j = array[:, j] * background[j]
            if sum(array_j) == 0.0:
                continue
            array_j = array_j - background[j]
            snr[:, j] = array_j / (background[j] * rms)
            if substract_background:
                array[:, j] = array_j
    if dB:
        array = to_dB(array.clip(MIN_VAL, array.max()))
        snr = to_dB(snr.clip(MIN_VAL, snr.max()))
        background = to_dB(background.clip(MIN_VAL, background.max()))
        if substract_background:
            intensity_units = "Intensity (dB)"
        else:
            intensity_units = "Intensity above background (dB)"
    else:
        if substract_background:
            intensity_units = "Intensity"
    prep_data.set_parameter("intensity_units", intensity_units)
    prep_data.set_parameter("intensity", array)
    prep_data.set_parameter("background", background)
    prep_data.set_parameter("snr", snr)
    return prep_data
def write_img(self, filename=None,
              date=None, receiver=None,
              data=None,
              format='jpg', quality=80,
              data_directory=None,
              output_filename=None,
              output_directory=None,
              min_val=None, max_val=None,
              verbose=True, greyscale=True,
              reverse_color=True,
              download_file=False,
              delete_file=False,
              prep=False):
    """
    Write output image file containing the dynamical spectrum.

    The data are taken from `data` or fetched via get_data(); the
    intensity is clipped to [min_val, max_val], converted to dB if not
    already, byte-scaled and saved.  Returns the written image path, or
    "" when no data could be obtained.
    """
    if (greyscale):
        mode = 'L'
    else:
        mode = 'RGB'
    ext = format.lower()
    if (data is None):
        data = self.get_data(date=date, receiver=receiver,
                             filename=filename,
                             data_directory=data_directory,
                             download_file=download_file,
                             delete_file=delete_file,
                             verbose=verbose, prep=prep)
        if (data is None):
            return ""
    array = data.intensity
    if (min_val is None): min_val = array.min()
    if (max_val is None): max_val = array.max()
    array = array.clip(min_val, max_val)
    # Convert to dB unless the units string says it already is.
    if not ("(db)" in data.intensity_units.lower()):
        array = to_dB(array)
    # Scale to 8-bit values for the image.
    array = bytescale(array)
    if (reverse_color):
        # Invert the greyscale so strong signal appears dark.
        array = array.max() - array
    image = Image.fromarray(array, mode=mode)
    if (output_filename is None):
        if (filename is None):
            filename = self.get_filename(date, receiver=receiver)
        output_filename = os.path.basename(filename) + "." + ext
    if (output_directory is None):
        output_path = output_filename
    else:
        output_path = os.path.join(output_directory, os.path.basename(output_filename))
    image.save(output_path, quality=quality)
    return output_path
# STEREO/Waves class
class stereo():
    """Placeholder for the STEREO/Waves data reader (not implemented)."""

    def __init__(self):
        # BUG FIX: __init__ was declared without `self`, so stereo()
        # raised TypeError; the placeholder is now instantiable.
        pass
# Spectrum class
class spectrum():
    """Container for a dynamic spectrum and its metadata.

    Every constructor argument is stored as a same-named attribute and
    can be accessed generically via get_parameter()/set_parameter().
    """

    # Attribute names, in constructor order.
    _FIELDS = ("observatory", "instrument", "receiver", "intensity",
               "background", "snr", "time", "frequency", "date_obs",
               "date_end", "naxis", "cdelt", "bandwidth",
               "integration_time", "intensity_units", "comment")

    def __init__(self,
                 observatory="",
                 instrument="",
                 receiver="",
                 intensity=None,
                 background=None,
                 snr=None,
                 time=None,
                 frequency=None,
                 date_obs=None,
                 date_end=None,
                 naxis=None,
                 cdelt=None,
                 bandwidth=None,
                 integration_time=None,
                 intensity_units="",
                 comment=""):
        # Bind each argument to the attribute of the same name.
        values = locals()
        for field in self._FIELDS:
            setattr(self, field, values[field])

    # Method to set input parameter
    def set_parameter(self, parameter, value):
        """Assign `value` to an existing attribute; unknown names are ignored."""
        if parameter in self.__dict__:
            self.__dict__[parameter] = value

    # Method to get an attribute's value
    def get_parameter(self, parameter):
        """Return the value of attribute `parameter`, or None if absent."""
        return self.__dict__.get(parameter)
# Method to download a file providing its url
def download_file(url,
                  output_filename="",
                  target_directory=CURRENT_DIRECTORY,
                  tries=3,
                  timeout=180,
                  timesleep=3,
                  username=None,
                  password=None,
                  overwrite=False,
                  verbose=True):
    """
    Download `url` into `target_directory` and return the local path.

    Retries up to `tries` times, sleeping `timesleep` seconds between
    attempts.  An existing local file is kept unless overwrite=True.
    Returns None when the URL could never be reached.

    NOTE(review): when `username` is set, the credentials are spliced
    into the URL (scheme://user:pass@host/path) and the rewritten URL
    is reused on later retries, prepending the credentials again --
    confirm before relying on authenticated retries.
    """
    if (verbose): print "Downloading %s..." % url
    target = None
    for i in range(tries):
        try:
            if (username is not None):
                # Embed basic-auth credentials directly in the URL.
                url_items = urlparse(url)
                url = url_items.scheme + "://" + username
                if (password is not None):
                    url += ":" + password
                url += "@" + url_items.netloc + url_items.path
            connect = urllib2.urlopen(url, None, timeout)
        except urllib2.URLError, why:
            if (verbose): print "Can not reach %s: %s [%s tries remaining]" % (url, why, tries-i)
            time.sleep(timesleep)
            continue
        else:
            if not (output_filename): output_filename = os.path.basename(url)
            target = os.path.join(target_directory, output_filename)
            if not (os.path.isfile(target)) or (overwrite):
                fw = open(target, 'wb')
                fw.write(connect.read())
                fw.close()
                if (verbose): print "%s saved" % target
            else:
                if (verbose): print "%s already exists" % target
            break
    return target
# Method to compute the 100*Q% quantile of a set X of values
def get_quantile(X, Q, nbins=None, dX=None):
    """Return a histogram-based estimate of the 100*Q% quantile of X.

    X is binned with bin width `dX` (default: twice the median absolute
    difference between consecutive values); the left edge of the bin
    where the cumulative count first reaches Q*len(X) is returned.

    Parameters
    ----------
    X : sequence of float
        Input values.
    Q : float
        Quantile fraction in [0, 1].
    nbins : int, optional
        Number of histogram bins (derived from dX when omitted).
    dX : float, optional
        Bin width used to derive nbins.
    """
    if dX is None:
        # BUG FIX: the original called the bare names `diff`/`abs`,
        # where `diff` is undefined unless numpy was star-imported.
        dX = 2.0 * np.median(np.abs(np.diff(X)))
    if nbins is None:
        nbins = int((max(X) - min(X)) / dX) + 1
    h, xh = np.histogram(X, bins=nbins)
    threshold = Q * sum(h)
    # Walk the cumulative histogram until the threshold is reached.
    i = 0
    hsum = 0.0
    nh = len(h)
    while hsum < threshold:
        hsum += h[i]
        i += 1
        if i == nh:
            break
    return xh[i - 1]
# Method to convert in dB
def to_dB(array):
    """Convert linear values to decibels: 10 * log10(array)."""
    return 10.0 * np.log10(np.array(array))
# Method to split float hours into hour, minute, second
def split_time(float_time):
    """Split a decimal-hours value into integer (hour, minute, second)."""
    hour = int(float_time)
    remainder_minutes = (float_time - float(hour)) * 60.0
    minute = int(remainder_minutes)
    second = int(float_time * 3600.0 - float(hour) * 3600.0 - float(minute) * 60.0)
    return hour, minute, second
def setup_logging(filename=None, quiet=False, verbose=False, debug=False):
    """
    Method to setup a logging instance.

    Configures the root logger via basicConfig (debug > verbose >
    default CRITICAL), adjusts the first root handler's level (quiet
    pushes it above CRITICAL, silencing everything), and optionally
    attaches a delayed FileHandler writing to `filename`.
    """
    global logging  # NOTE(review): no effect -- `logging` is only read, never rebound.
    if debug:
        logging.basicConfig(level=logging.DEBUG, format='%(levelname)-8s: %(message)s')
    elif verbose:
        logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    else:
        logging.basicConfig(level=logging.CRITICAL, format='%(levelname)-8s: %(message)s')
    if quiet:
        # CRITICAL + 10 is above every predefined level.
        logging.root.handlers[0].setLevel(logging.CRITICAL + 10)
    elif verbose:
        logging.root.handlers[0].setLevel(logging.INFO)
    else:
        logging.root.handlers[0].setLevel(logging.CRITICAL)
    if filename:
        import logging.handlers  # NOTE(review): appears unused -- FileHandler lives in `logging` itself.
        fh = logging.FileHandler(filename, delay=True)
        fh.setFormatter(logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(funcName)-12s %(message)s', datefmt='%Y-%m-%d %H:%M:%S'))
        if debug:
            fh.setLevel(logging.DEBUG)
        else:
            fh.setLevel(logging.INFO)
        logging.root.addHandler(fh)
| 33.667532 | 144 | 0.494985 | 2,728 | 25,924 | 4.573314 | 0.141129 | 0.011542 | 0.011542 | 0.016752 | 0.30234 | 0.251122 | 0.191488 | 0.143395 | 0.099711 | 0.088971 | 0 | 0.020115 | 0.397855 | 25,924 | 769 | 145 | 33.711313 | 0.779116 | 0.025729 | 0 | 0.321489 | 0 | 0 | 0.069142 | 0.008544 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.01692 | 0.020305 | null | null | 0.010152 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
aebacbff88644f6b11b87656284f7f87a822a746 | 2,072 | py | Python | coturn/signals.py | Core4ce/avalon-django-coturn | 51609b04cf4ef2eaccbc08ee7884d59c6fa87474 | [
"BSD-3-Clause"
] | 3 | 2020-09-20T20:06:21.000Z | 2021-09-10T11:18:09.000Z | coturn/signals.py | Core4ce/avalon-django-coturn | 51609b04cf4ef2eaccbc08ee7884d59c6fa87474 | [
"BSD-3-Clause"
] | null | null | null | coturn/signals.py | Core4ce/avalon-django-coturn | 51609b04cf4ef2eaccbc08ee7884d59c6fa87474 | [
"BSD-3-Clause"
] | 1 | 2019-01-17T13:47:31.000Z | 2019-01-17T13:47:31.000Z | import hmac
import hashlib
from django.conf import settings
from django.contrib.auth.models import User
from .models import TurnusersLt
def sync_new_user_to_coturn(sender, instance, **kwargs):
    """Signal receiver mirroring a saved user into coturn's turnusers_lt
    table (written through the "coturn" database alias).

    `sender` is the model class and `instance` the saved object, per
    Django's save-signal convention.
    """
    ##
    # NOTE: Do not use this for REST API. You do not need to sync users if you are using the REST API.
    #
    #
    if sender == User:
        username = instance.get_username()
    else:
        # Non-django user model: resolve the username through the
        # COTURN_USERNAME_FIELD setting -- the name of a callable
        # attribute, optionally dotted for nested lookups.
        if not hasattr(settings, "COTURN_USERNAME_FIELD"):
            raise ImportError("Coturn was sent a signal from a non-django User model, but COTURN_USERNAME_FIELD is not set")
        if not hasattr(instance, settings.COTURN_USERNAME_FIELD):
            # NOTE(review): for dotted field names this hasattr() is
            # always False, so the dotted branch below looks
            # unreachable -- confirm intended behaviour.
            raise ValueError("Coturn - username field {} does not exist on model we were sent in sync signal".format(settings.COTURN_USERNAME_FIELD))
        if "." in settings.COTURN_USERNAME_FIELD:
            # walk down the set of sub-fields until you reach the username
            fields = settings.COTURN_USERNAME_FIELD.split(".")
            working_instance = instance
            for field in fields:
                if hasattr(working_instance, field):
                    working_instance = getattr(working_instance, field)
                else:
                    raise ValueError("missing sub-field in username search: {}".format(field))
            username = working_instance()
        else:
            username = getattr(instance, settings.COTURN_USERNAME_FIELD)
            username = username()
    if not hasattr(settings, "COTURN_REALM"):
        raise ValueError("Coturn - missing COTURN_REALM entry in settings.py")
    realm = settings.COTURN_REALM
    # NOTE: since we assume the system will be running coturn in REST API mode, this password will never be used.
    # so we set it to something random.
    password = User.objects.make_random_password()
    hash_val = hmac.new(settings.SECRET_KEY.encode("utf-8"), password.encode("utf-8"), hashlib.sha1)
    # NOTE(review): hmac.update() needs bytes on Python 3; `realm` is
    # presumably str here -- verify whether realm.encode() is needed.
    hash_val.update(realm)
    new_user = TurnusersLt(name=username, realm=realm, password=hash_val.hexdigest())
    new_user.save(using="coturn")
| 47.090909 | 149 | 0.678089 | 270 | 2,072 | 5.074074 | 0.377778 | 0.081752 | 0.110949 | 0.118248 | 0.105839 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001905 | 0.239865 | 2,072 | 43 | 150 | 48.186047 | 0.867937 | 0.144305 | 0 | 0.090909 | 0 | 0 | 0.175837 | 0.023823 | 0 | 0 | 0 | 0 | 0 | 1 | 0.030303 | false | 0.090909 | 0.181818 | 0 | 0.212121 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
aebb1ee94b1592a14967eb3ea0476a46d0c07a3e | 906 | py | Python | day/day_02/solution.py | moki/aoc2015 | da43fccd20d154840161c022d1f3c0f70035d604 | [
"BSD-3-Clause"
] | null | null | null | day/day_02/solution.py | moki/aoc2015 | da43fccd20d154840161c022d1f3c0f70035d604 | [
"BSD-3-Clause"
] | null | null | null | day/day_02/solution.py | moki/aoc2015 | da43fccd20d154840161c022d1f3c0f70035d604 | [
"BSD-3-Clause"
] | null | null | null | from functools import reduce
def part_1(input):
    """Total wrapping paper: 2*(lw + wh + hl) plus the area of the
    smallest face, summed over every "LxWxH" line of `input`."""
    paper = 0
    for line in input.split("\n"):
        if not line:
            continue
        l, w, h = (int(d) for d in line.split("x"))
        faces = [l * w, w * h, l * h]
        # Slack: one extra copy of the smallest face.
        paper += 2 * sum(faces) + min(faces)
    return paper
def part_2(input):
    """Total ribbon length over every "LxWxH" line of `input`.

    Each box contributes its volume (the bow) plus the perimeter of its
    smallest face, i.e. 2 * (sum of the two smallest dimensions).

    Fix: the original left a debug print(bow) inside the loop, spamming
    stdout for every line; it has been removed.
    """
    ribbon = 0
    for line in input.split("\n"):
        if len(line) == 0:
            continue
        ds = [int(d) for d in line.split("x")]
        # Bow: the volume of the box.
        ribbon += ds[0] * ds[1] * ds[2]
        # Wrap: perimeter of the smallest face.
        smallest, second = sorted(ds)[:2]
        ribbon += 2 * (smallest + second)
    return ribbon
| 18.875 | 61 | 0.484547 | 144 | 906 | 3.034722 | 0.263889 | 0.045767 | 0.148741 | 0.160183 | 0.615561 | 0.519451 | 0.446224 | 0.446224 | 0.398169 | 0.398169 | 0 | 0.035714 | 0.350993 | 906 | 47 | 62 | 19.276596 | 0.707483 | 0 | 0 | 0.37037 | 0 | 0 | 0.006623 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.074074 | false | 0 | 0.037037 | 0 | 0.185185 | 0.037037 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
aebbc1ae1c7a9472843504f39a0a522d3acd8c60 | 627 | py | Python | steambird/util/import_from_ut_people.py | rhbvkleef/SteamBird | 6dbbad0750ef918872da18b813669282885b8f95 | [
"BSD-3-Clause"
] | null | null | null | steambird/util/import_from_ut_people.py | rhbvkleef/SteamBird | 6dbbad0750ef918872da18b813669282885b8f95 | [
"BSD-3-Clause"
] | 22 | 2020-11-27T19:05:34.000Z | 2020-12-05T16:50:43.000Z | steambird/util/import_from_ut_people.py | rhbvkleef/SteamBird | 6dbbad0750ef918872da18b813669282885b8f95 | [
"BSD-3-Clause"
] | 1 | 2020-11-27T21:08:15.000Z | 2020-11-27T21:08:15.000Z | from typing import Union
from urllib.parse import quote
import requests
import vobject
from vobject.base import Component
def read_vcard(vcard_url) -> Union[Component, None]:
    """Fetch a vCard from `vcard_url` and parse it.

    Returns the first vObject Component found in the response body, or
    None on any failure (network error, unparsable vCard, ...).
    """
    try:
        vcard = requests.get(vcard_url).text
        return vobject.readOne(vcard)
    # pylint: disable=bare-except
    except:
        return None
def search_people(search_query):
    """Query the University of Twente people directory.

    The query string is URL-escaped before interpolation; returns the
    "data" list from the JSON response.
    """
    url = "https://people.utwente.nl/data/search?query={}".format(quote(search_query))
    people = requests.get(url, headers={
        'Accept': 'application/json',
        'Referer': 'https://people.utwente.nl/',
    }).json()['data']
    return people
| 24.115385 | 86 | 0.676236 | 78 | 627 | 5.358974 | 0.487179 | 0.078947 | 0.086124 | 0.095694 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.196172 | 627 | 25 | 87 | 25.08 | 0.829365 | 0.043062 | 0 | 0 | 0 | 0 | 0.175585 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.277778 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
aecdf11e5f1c0c45e570fe2a778e4ea9c2c59a0b | 1,414 | py | Python | cds_ils/patrons/indexer.py | kpsherva/cds-ils | 8eeeb6e03784756ed24895c8d030682f9d733e8a | [
"MIT"
] | 6 | 2020-09-18T00:13:38.000Z | 2021-11-14T17:12:19.000Z | cds_ils/patrons/indexer.py | kpsherva/cds-ils | 8eeeb6e03784756ed24895c8d030682f9d733e8a | [
"MIT"
] | 321 | 2020-08-28T15:42:25.000Z | 2022-03-14T15:11:50.000Z | cds_ils/patrons/indexer.py | kpsherva/cds-ils | 8eeeb6e03784756ed24895c8d030682f9d733e8a | [
"MIT"
] | 8 | 2019-07-10T07:02:08.000Z | 2020-08-10T14:07:25.000Z | # Copyright (C) 2021 CERN.
#
# CDS-ILS is free software; you can redistribute it and/or modify it under
# the terms of the MIT License; see LICENSE file for more details.
"""Patron Indexer for CDS-ILS."""
from flask import current_app
from invenio_accounts.models import User
from invenio_app_ils.patrons.indexer import PatronBaseIndexer
from invenio_app_ils.patrons.indexer import PatronIndexer as ILSPatronIndexer
from invenio_app_ils.proxies import current_app_ils
from invenio_db import db
from invenio_oauthclient.models import RemoteAccount
class PatronIndexer(ILSPatronIndexer):
    """Indexer class for `Patron`."""

    def reindex_patrons(self):
        """Re-index all patrons.

        Returns the number of user accounts that were (re)indexed.
        """
        # do not use PatronIndexer class otherwise it will trigger potentially
        # thousands of tasks to index referenced records
        indexer = PatronBaseIndexer()
        Patron = current_app_ils.patron_cls
        # cannot use bulk operation because Patron is not a real record
        index_local_accounts = current_app.config[
            "CDS_ILS_INDEX_LOCAL_ACCOUNTS"
        ]
        if index_local_accounts:
            all_user_ids = db.session.query(User.id).all()
        else:
            # Only users linked to an OAuth remote account.
            all_user_ids = db.session.query(RemoteAccount.user_id).all()
        for (user_id,) in all_user_ids:
            patron = Patron(user_id)
            indexer.index(patron)
        return len(all_user_ids)
| 36.25641 | 78 | 0.714286 | 189 | 1,414 | 5.153439 | 0.449735 | 0.067762 | 0.041068 | 0.052361 | 0.125257 | 0.125257 | 0.075975 | 0 | 0 | 0 | 0 | 0.003617 | 0.217822 | 1,414 | 38 | 79 | 37.210526 | 0.877034 | 0.296322 | 0 | 0 | 0 | 0 | 0.028777 | 0.028777 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045455 | false | 0 | 0.318182 | 0 | 0.454545 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
aed26d89ca3f013e25a9babaf5a275d0145aeec8 | 1,671 | py | Python | Web_Server/webapps/buildinginfos/views.py | ajfar-bem/wisebldg | 0cb8ef7c5984cbb5cc86e40780fdf4e14e5bda05 | [
"Unlicense"
] | null | null | null | Web_Server/webapps/buildinginfos/views.py | ajfar-bem/wisebldg | 0cb8ef7c5984cbb5cc86e40780fdf4e14e5bda05 | [
"Unlicense"
] | null | null | null | Web_Server/webapps/buildinginfos/views.py | ajfar-bem/wisebldg | 0cb8ef7c5984cbb5cc86e40780fdf4e14e5bda05 | [
"Unlicense"
] | null | null | null | import json
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseRedirect
from django.template import RequestContext
from webapps.buildinginfos.models import BuildingInfo
from _utils.device_list_utils import get_device_list_and_count
@login_required(login_url='/login/')
def buildinginfos_display(request):
print 'Device status page load'
context = RequestContext(request)
if request.user.groups.filter(name__iexact = 'admin').exists():
return render(request, 'buildinginfos/building_info.html', get_device_list_and_count(request))
else:
return HttpResponseRedirect('/home/')
def change_setting(request):
if request.body:
_data = request.body
_data = json.loads(_data)
building_id = _data.pop('building_id')
info_changed = _data.keys()
building_info = BuildingInfo.objects.get(building_id=building_id)
if building_info in request.user.userprofile.authorized_buildings():
if 'name' in info_changed:
building_info.name = _data['name']
if 'zip_code' in info_changed:
building_info.zip_code = _data['zip_code']
if 'description' in info_changed:
building_info.description = _data['description']
if 'location' in info_changed:
building_info.building_settings['location'] = _data['location']
building_info.save()
message = 'success'
else:
message = 'error'
if request.is_ajax():
return HttpResponse(json.dumps(message))
| 37.133333 | 102 | 0.685218 | 190 | 1,671 | 5.763158 | 0.394737 | 0.087671 | 0.047489 | 0.076712 | 0.12968 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.22681 | 1,671 | 44 | 103 | 37.977273 | 0.847523 | 0 | 0 | 0.054054 | 0 | 0 | 0.099342 | 0.01915 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.189189 | null | null | 0.027027 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
aed55b6ccfc8daceb8b960c5b7cf3555d0fd88f9 | 851 | py | Python | tests/utils/test_serializer.py | guilhermevarela/ilu | e4db9744c28f9e04ae82c884f131ee8cd9601cc8 | [
"MIT"
] | 2 | 2019-10-18T17:04:50.000Z | 2019-10-18T17:05:04.000Z | tests/utils/test_serializer.py | guilhermevarela/ilurl | e4db9744c28f9e04ae82c884f131ee8cd9601cc8 | [
"MIT"
] | 17 | 2019-11-20T09:33:50.000Z | 2020-01-30T14:57:40.000Z | tests/utils/test_serializer.py | gsavarela/ilurl | e4db9744c28f9e04ae82c884f131ee8cd9601cc8 | [
"MIT"
] | null | null | null | import os
import unittest
from ilurl.utils.serialize import Serializer
class TestSerialization(unittest.TestCase):
class Foo(Serializer):
def __init__(self, foo):
self.foo = foo
def setUp(self):
'''Define the simplest test class'''
script_path = os.path.dirname(os.path.realpath(__file__))
tests_path = '/'.join(script_path.split('/')[:-1])
self.dump_path = '{}/data/'.format(tests_path)
def test_dump(self):
f = self.Foo('Nerf')
f.dump(self.dump_path, 'foo')
def test_load(self):
f = self.Foo('Nerf')
f.dump(self.dump_path, 'foo')
g = self.Foo.load(self.dump_path + 'foo.pickle')
self.assertEqual(f.foo, g.foo)
def tearDown(self):
'''Remove pickles'''
os.remove('{}{}'.format(self.dump_path, 'foo.pickle'))
| 27.451613 | 65 | 0.601645 | 111 | 851 | 4.441441 | 0.369369 | 0.070994 | 0.121704 | 0.121704 | 0.231237 | 0.146045 | 0.146045 | 0.146045 | 0.146045 | 0.146045 | 0 | 0.001546 | 0.239718 | 851 | 30 | 66 | 28.366667 | 0.760433 | 0.052879 | 0 | 0.190476 | 0 | 0 | 0.060377 | 0 | 0 | 0 | 0 | 0 | 0.047619 | 1 | 0.238095 | false | 0 | 0.142857 | 0 | 0.47619 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
aeda90d64b894a59f11f854e2a4117bfb466ef39 | 1,564 | py | Python | morphology_embed.py | hbatta/morphological-nmt | 2553b13bc13553662ad861ae6e809e152939d559 | [
"MIT"
] | null | null | null | morphology_embed.py | hbatta/morphological-nmt | 2553b13bc13553662ad861ae6e809e152939d559 | [
"MIT"
] | null | null | null | morphology_embed.py | hbatta/morphological-nmt | 2553b13bc13553662ad861ae6e809e152939d559 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Morphology_embed.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1uZUe-O4GgKuMK1FfbAV7xztgtD4lWEEC
"""
import stanza
import argparse

# Command-line interface: language code, input text file, output file.
parser = argparse.ArgumentParser(description='A tutorial of argparse!')
parser.add_argument("--language")
parser.add_argument("--inputfile")
parser.add_argument("--outputfile")
args = parser.parse_args()
lang = args.language
inp_f = args.inputfile
opt_f = args.outputfile

fr = open(inp_f, 'r+')
entxt = fr.read()

# Tokenize and POS/morphology-tag the whole input with Stanza.
nlp = stanza.Pipeline(lang=lang, processors='tokenize,pos', use_gpu=True, pos_batch_size=2000)
doc = nlp(entxt)
print(len(doc.sentences))

fd = open(opt_f, 'a+')
# Fixed order of morphological features appended to every token.
featlst = ['Case', 'AdpType', 'Gender', 'Mood', 'Person', 'PronType', 'Tense', 'VerbForm', 'Voice', 'Number']
i = 0
for sent in doc.sentences:
    wrdlst = []
    print(i)
    i = i + 1
    for word in sent.words:
        # Each token is encoded as "text+UPOS" followed by one
        # "+value" (or "+-" placeholder) per feature in featlst.
        formatted_word = f'{word.text}+{word.upos}'
        if word.feats:
            # word.feats looks like "Case=Nom|Number=Sing|...".
            morph_feat_dict = dict(x.split("=") for x in word.feats.split("|"))
            feat_form = ''
            for feat in featlst:
                if feat in morph_feat_dict:
                    feat_form = feat_form + '+' + morph_feat_dict[feat]
                else:
                    feat_form = feat_form + '+' + '-'
            #print(feat_form)
            #print(d)
        else:
            # No features at all: one "-" per feature slot.
            feat_form = '+-+-+-+-+-+-+-+-+-+-'
        formatted_word = formatted_word + feat_form
        wrdlst.append(formatted_word)
        #print(formatted_word)
    formatted_sent = ' '.join(wrdlst)
    fd.write(formatted_sent)
    fd.write("\n")
fd.close()
print("file formatting has been done for language:", lang)
aedc3dfc1d19040bf061f190f401892381f40e61 | 402 | py | Python | Projects/Project 0/Warm-up/loops.py | ivenpoker/Python-Projects | 2975e1bd687ec8dbcc7a4842c13466cb86292679 | [
"MIT"
] | 1 | 2019-09-23T15:51:45.000Z | 2019-09-23T15:51:45.000Z | Projects/Project 0/Warm-up/loops.py | ivenpoker/Python-Projects | 2975e1bd687ec8dbcc7a4842c13466cb86292679 | [
"MIT"
] | 5 | 2021-02-08T20:47:19.000Z | 2022-03-12T00:35:44.000Z | Projects/Project 0/Warm-up/loops.py | ivenpoker/Python-Projects | 2975e1bd687ec8dbcc7a4842c13466cb86292679 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
#: Program Purpose:
#: Read an integer N. For all non-negative integers i < N
#: print i * i.
#:
#: Program Author: Happi Yvan <ivensteinpoker@gmail.com
#: Program Date : 11/04/2019 (mm/dd/yyyy)
def process(int_val):
    """Print i * i on its own line for every integer 0 <= i < int_val."""
    for i in range(int_val):
        print(i * i)
def main():
    """Read an integer N from stdin and print 0, 1, 4, ..., (N-1)^2."""
    val = int(input())
    process(val)


if __name__ == '__main__':
    main()
aee48d9d882c754b1a18821795b4715b37c2efd0 | 3,993 | py | Python | scheme/tests/07.py | sfailsthy/CS-61A | 8c3d6e2ed87c42ba6ceaf9b7853ef4ad3af099fc | [
"MIT"
] | null | null | null | scheme/tests/07.py | sfailsthy/CS-61A | 8c3d6e2ed87c42ba6ceaf9b7853ef4ad3af099fc | [
"MIT"
] | null | null | null | scheme/tests/07.py | sfailsthy/CS-61A | 8c3d6e2ed87c42ba6ceaf9b7853ef4ad3af099fc | [
"MIT"
] | null | null | null | test = {
'name': 'Problem 7',
'points': 1,
'suites': [
{
'cases': [
{
'answer': '103495fc3358e1b6354d1d4a277039e6',
'choices': [
r"""
Pair('quote', Pair(A, nil)), where:
A is the quoted expression
""",
r"""
[A], where:
A is the quoted expression
""",
r"""
Pair(A, nil), where:
A is the quoted expression
""",
r"""
A, where:
A is the quoted expression
"""
],
'hidden': False,
'locked': True,
'question': 'What is the structure of the expressions argument to do_quote_form?'
}
],
'scored': False,
'type': 'concept'
},
{
'cases': [
{
'code': r"""
scm> (quote hello)
506108214132d6cb9c307edb2f9d0f8b
# locked
scm> 'hello
506108214132d6cb9c307edb2f9d0f8b
# locked
scm> ''hello
11e53d0ebd5fcb87953db84e824e20c7
# locked
# choice: (quote hello)
# choice: hello
# choice: (hello)
# choice: (quote (quote (hello)))
scm> (quote (1 2))
e0115c13325291c6a30393eff9777ee4
# locked
scm> '(1 2)
e0115c13325291c6a30393eff9777ee4
# locked
scm> (quote (1 . 2))
9a8a33947ee1cd41aa91f5f15184c47b
# locked
scm> '(1 . (2))
e0115c13325291c6a30393eff9777ee4
# locked
scm> (car '(1 2 3))
1d6ef7880cd9b59b64a1f4e1a1e35a12
# locked
scm> (cdr '(1 2))
1c70ebb4f1aabfcbe22f96bda497dd0b
# locked
scm> (car (car '((1))))
1d6ef7880cd9b59b64a1f4e1a1e35a12
# locked
scm> (quote 3)
ed2605996ac3b24d98b27c6d58145f06
# locked
scm> (eval (cons 'car '('(4 2))))
5dc34dbe25d53109ac62b4184b75a40f
# locked
""",
'hidden': False,
'locked': True
}
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'scheme'
},
{
'cases': [
{
'code': r"""
>>> read_line(" (quote x) ")
55894b325c4c2817733a8a1223c79f1e
# locked
>>> read_line(" 'x ")
55894b325c4c2817733a8a1223c79f1e
# locked
# choice: Pair('x', nil)
# choice: 'x'
# choice: Pair('quote', 'x')
# choice: Pair('quote', Pair('x', nil))
>>> read_line(" (a b) ")
6e7962ce0515005f1aa1ece26c1f9f99
# locked
# choice: Pair('a', Pair('b', nil))
# choice: Pair('quote', Pair(Pair('a', Pair('b', nil)), nil))
# choice: Pair('quote', Pair('a', 'b'))
# choice: Pair('quote', Pair('a', Pair('b', nil)))
>>> read_line(" '(a b) ")
1af43453acd78705e072b903fe9ce759
# locked
# choice: Pair('a', Pair('b', nil))
# choice: Pair('quote', Pair(Pair('a', Pair('b', nil)), nil))
# choice: Pair('quote', Pair('a', 'b'))
# choice: Pair('quote', Pair('a', Pair('b', nil)))
>>> read_line(" '((a)) ")
6b34a9dd52ff83f52d5e6953f2d7375f
# locked
# choice: Pair('quote', Pair(Pair('a', nil), nil))
# choice: Pair('quote', Pair(Pair('a', nil), nil), nil)
# choice: Pair('quote', Pair(Pair('a'), nil))
# choice: Pair('quote', Pair(Pair('a'), nil), nil)
# choice: Pair('quote', Pair(Pair(Pair('a', nil), nil), nil))
""",
'hidden': False,
'locked': True
}
],
'scored': True,
'setup': r"""
>>> from scheme_reader import *
""",
'teardown': '',
'type': 'doctest'
}
]
}
| 28.726619 | 91 | 0.444528 | 325 | 3,993 | 5.436923 | 0.218462 | 0.090549 | 0.095642 | 0.129032 | 0.47708 | 0.39502 | 0.387097 | 0.286927 | 0.286927 | 0.265988 | 0 | 0.166106 | 0.404458 | 3,993 | 138 | 92 | 28.934783 | 0.576955 | 0 | 0 | 0.460317 | 0 | 0.02381 | 0.822554 | 0.159204 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.007937 | 0 | 0.007937 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
aee4a73710fb4c9dcd2d5d3439e31c41e58818fc | 473 | py | Python | tools/gz.py | codeyu/unihandecode-net | 4b4a6cf2516ceb98e3d63aa9bffa5e5f8110a14d | [
"MIT"
] | null | null | null | tools/gz.py | codeyu/unihandecode-net | 4b4a6cf2516ceb98e3d63aa9bffa5e5f8110a14d | [
"MIT"
] | null | null | null | tools/gz.py | codeyu/unihandecode-net | 4b4a6cf2516ceb98e3d63aa9bffa5e5f8110a14d | [
"MIT"
] | null | null | null | import os, gzip
INPUT_FILES=['kr_codepoints.json','ja_codepoints.json','zh_codepoints.json','vn_codepoints.json','yue_codepoints.json','unicodepoints.json']
for f in INPUT_FILES:
source = os.path.join('result', f)
input = open(source, 'rb')
s = input.read()
input.close()
dest = os.path.join('../src/Unihandecode/_gz', f+'.gz')
output = gzip.GzipFile(dest, 'wb')
output.write(s)
output.close()
print("done") | 33.785714 | 140 | 0.621564 | 63 | 473 | 4.539683 | 0.555556 | 0.244755 | 0.06993 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.20296 | 473 | 14 | 141 | 33.785714 | 0.758621 | 0 | 0 | 0 | 0 | 0 | 0.314346 | 0.048523 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.083333 | 0.083333 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
aeeaaec6d2ab2612292389d2e41cac715a52e5d1 | 820 | py | Python | triangle.py | Mohan-V-P/Shapes | e78e663caeb348b37551288434528def7cd14339 | [
"MIT"
] | null | null | null | triangle.py | Mohan-V-P/Shapes | e78e663caeb348b37551288434528def7cd14339 | [
"MIT"
] | null | null | null | triangle.py | Mohan-V-P/Shapes | e78e663caeb348b37551288434528def7cd14339 | [
"MIT"
] | null | null | null | import cv2
import numpy as np
from shapes import Myinit
class Triangle(Myinit):
    """A filled magenta triangle drawn onto the image provided by Myinit."""

    def __init__(self):
        super(Triangle, self).__init__()
        # Three corner points, reshaped into the (N, 1, 2) layout that
        # cv2.polylines/fillPoly expect for a point list.
        corners = np.array([[100, 50], [150, 150], [50, 150]], np.int32)
        self.vertices = corners.reshape((-1, 1, 2))
        self.color = (255, 0, 255)

    def form_shape(self):
        """Outline the triangle on self.img, then fill it with self.color."""
        self.img = cv2.polylines(self.img, [self.vertices], True, self.color)
        cv2.fillPoly(self.img, [self.vertices], self.color)

    def welcome(self):
        """Announce that the triangle is about to be drawn."""
        print('Printing Triangle...!')

    def sides(self):
        """Report how many sides a triangle has."""
        print("Triangle has 3 sides.")

    def draw_shape(self):
        """Render the triangle in a window and block until a key is pressed."""
        self.welcome()
        self.form_shape()
        self.sides()
        cv2.imshow("Triangle", self.img)
        cv2.waitKey(0)
        cv2.destroyAllWindows()
| 28.275862 | 78 | 0.584146 | 103 | 820 | 4.543689 | 0.407767 | 0.128205 | 0.068376 | 0.081197 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.060201 | 0.270732 | 820 | 28 | 79 | 29.285714 | 0.722408 | 0 | 0 | 0 | 0 | 0 | 0.063131 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.217391 | false | 0 | 0.130435 | 0 | 0.391304 | 0.086957 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
aef0826fcc445a6da01b268bd484e494caf70f09 | 3,188 | py | Python | koapy/cli/utils/credential.py | resoliwan/koapy | b0616f252bb3588695dfb37c7d9b8580a65649a3 | [
"MIT"
] | 1 | 2021-09-25T22:33:01.000Z | 2021-09-25T22:33:01.000Z | koapy/cli/utils/credential.py | resoliwan/koapy | b0616f252bb3588695dfb37c7d9b8580a65649a3 | [
"MIT"
] | null | null | null | koapy/cli/utils/credential.py | resoliwan/koapy | b0616f252bb3588695dfb37c7d9b8580a65649a3 | [
"MIT"
] | 1 | 2021-11-12T15:33:29.000Z | 2021-11-12T15:33:29.000Z | import os
import click
from koapy.config import (
config,
config_from_dict,
default_user_config_path,
save_config,
user_config,
)
def prompt_credential():
    """Interactively collect Kiwoom OpenAPI+ credential info via click prompts.

    Values already present in the loaded config are offered as defaults.
    Password prompts hide the typed input and do not echo their defaults.
    One account number/password pair is collected per account the user
    declares.

    Returns the collected credential wrapped by ``config_from_dict``.
    """
    defaults = config.get("koapy.backend.kiwoom_open_api_plus.credential")

    user_id = click.prompt("User ID", default=defaults["user_id"])
    user_password = click.prompt(
        "User Password",
        hide_input=True,
        default=defaults["user_password"],
        show_default=False,
    )

    server = click.prompt(
        "Server Type",
        type=click.Choice(["real", "simulation"], case_sensitive=False),
        default="simulation" if defaults["is_simulation"] else "real",
    )
    is_simulation = server == "simulation"

    # The certificate password is only required against the real server;
    # simulation keeps whatever the config already holds.
    if is_simulation:
        cert_password = defaults["cert_password"]
    else:
        cert_password = click.prompt(
            "Cert Password",
            hide_input=True,
            default=defaults["cert_password"],
            show_default=False,
        )

    account_passwords = {}
    account_count = click.prompt("Account Count", type=int, default=1)
    for _ in range(account_count):
        number = click.prompt("Account Number", default="0000000000")
        password = click.prompt(
            "Account Password",
            hide_input=True,
            default="0000",
            show_default=False,
        )
        account_passwords[number] = password

    return config_from_dict(
        {
            "user_id": user_id,
            "user_password": user_password,
            "cert_password": cert_password,
            "is_simulation": is_simulation,
            "account_passwords": account_passwords,
        }
    )
def get_credential(interactive=False):
    """Return credential info from config, or prompt for it interactively.

    With ``interactive=False`` the credential is read straight from the
    loaded config. Otherwise the user is prompted for every field and may
    optionally persist the result to a user config file, with an extra
    overwrite confirmation when the target file already exists.
    """
    if not interactive:
        # Non-interactive: just hand back what the config already has.
        return config.get("koapy.backend.kiwoom_open_api_plus.credential")

    credential = prompt_credential()

    wants_save = click.prompt(
        "Save credential info into a config file?",
        type=click.Choice(["y", "n"], case_sensitive=False),
        default="n",
    )
    if wants_save == "y":
        config_path = click.prompt(
            "Path to save config file", default=default_user_config_path
        )
        if not os.path.exists(config_path):
            should_write = True
        else:
            # Destination exists — require an explicit overwrite confirmation.
            answer = click.prompt(
                "The file already exists, overwrite?",
                type=click.Choice(["y", "n"], case_sensitive=False),
                default="n",
            )
            should_write = answer == "y"
        if should_write:
            user_config.put(
                "koapy.backend.kiwoom_open_api_plus.credential", credential
            )
            save_config(config_path, user_config)
    return credential
| 29.794393 | 80 | 0.577164 | 315 | 3,188 | 5.55873 | 0.209524 | 0.062821 | 0.03084 | 0.037693 | 0.194746 | 0.178755 | 0.138778 | 0.116505 | 0.116505 | 0.116505 | 0 | 0.007036 | 0.331242 | 3,188 | 106 | 81 | 30.075472 | 0.814259 | 0 | 0 | 0.236559 | 0 | 0 | 0.153701 | 0.042346 | 0 | 0 | 0 | 0 | 0 | 1 | 0.021505 | false | 0.172043 | 0.032258 | 0 | 0.075269 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.