function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def update_document_on_search_service(sender, **kwargs):
    """Signal handler: push a saved metric into the search index.

    Raw saves (e.g. fixture loading) are skipped because related data
    may not be in a consistent state yet.
    """
    if kwargs.get('raw', False):
        return
    instance = kwargs['instance']
    search_index_update('metric', instance.id)
def delete_document_on_search_service(sender, **kwargs):
    """Signal handler: remove a deleted metric from the search index."""
    deleted = kwargs['instance']
    search_index_delete('metric', deleted.id)
def shrink_host(url):
    """Return the last two dot-separated labels of *url*'s host, UTF-8 encoded.

    e.g. 'http://www.example.com/x' -> 'example.com'.

    Fixed: the previous ``u[-2] + '.' + u[-1]`` raised IndexError for
    single-label hosts such as 'http://localhost/'; joining a slice keeps
    the old result for two or more labels and degrades gracefully to the
    bare label otherwise.
    """
    labels = urlparse.urlparse(url)[1].split('.')
    host = '.'.join(labels[-2:])
    return host.encode('utf-8')
def request(url, close=True, redirect=True, error=False, proxy=None, post=None, headers=None, mobile=False, limit=None, referer=None, cookie=None, compression=True, output='', timeout='30', XHR=False):
    """Fetch *url* via urllib2 with scraper-friendly behavior: optional proxy,
    cookie jar, gzip decoding, and transparent handling of Cloudflare,
    Sucuri and BlazingFast anti-bot challenges.

    ``output`` selects the return shape: '' -> body string, 'cookie' ->
    cookie string, 'geturl' -> final URL, 'headers' -> response headers,
    'chunk' -> first 16 KiB of large responses, 'extended' ->
    (body, status code, response headers, request headers, cookie).
    Returns None on unhandled HTTP errors (unless error=True) and on any
    internal exception.
    """
    try:
        #control.log('@@@@@@@@@@@@@@ - URL:%s POST:%s' % (url, post))
        handlers = []
        if not proxy == None:
            handlers += [urllib2.ProxyHandler({'http':'%s' % (proxy)}), urllib2.HTTPHandler]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        if output == 'cookie' or output == 'extended' or not close == True:
            # The cookie jar is only materialized when the caller can observe it.
            cookies = cookielib.LWPCookieJar()
            handlers += [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)
        if (2, 7, 9) <= sys.version_info < (2, 7, 11):
            # These point releases enforce certificate validation by default;
            # disable it so hosts with bad certs keep working.
            try:
                import ssl; ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                opener = urllib2.install_opener(opener)
            except:
                pass
        if url.startswith('//'): url = 'http:' + url
        # NOTE(review): headers.update(headers) is a no-op on a dict; its only
        # effect is to raise (and fall into the except) when headers is None.
        try: headers.update(headers)
        except: headers = {}
        if 'User-Agent' in headers:
            pass
        elif not mobile == True:
            #headers['User-Agent'] = agent()
            headers['User-Agent'] = cache.get(randomagent, 1)
        else:
            # The Android UA immediately overrides the iPhone one.
            headers['User-Agent'] = 'Apple-iPhone/701.341'
            headers['User-Agent'] = 'Mozilla/5.0 (Linux; U; Android 4.0.3; ko-kr; LG-L160L Build/IML74K) AppleWebkit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30'
        if 'Referer' in headers:
            pass
        elif referer == None:
            # Default referer: the site's own root.
            headers['Referer'] = '%s://%s/' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
        else:
            headers['Referer'] = referer
        if not 'Accept-Language' in headers:
            headers['Accept-Language'] = 'en-US'
        if 'X-Requested-With' in headers:
            pass
        elif XHR == True:
            headers['X-Requested-With'] = 'XMLHttpRequest'
        if 'Cookie' in headers:
            pass
        elif not cookie == None:
            headers['Cookie'] = cookie
        if 'Accept-Encoding' in headers:
            pass
        elif compression and limit is None:
            headers['Accept-Encoding'] = 'gzip'
        if redirect == False:
            # Swallow 3xx handling: return the redirect response itself.
            class NoRedirection(urllib2.HTTPErrorProcessor):
                def http_response(self, request, response): return response
            opener = urllib2.build_opener(NoRedirection)
            opener = urllib2.install_opener(opener)
            try: del headers['Referer']
            except: pass
        if isinstance(post, dict):
            post = urllib.urlencode(post)
        request = urllib2.Request(url, data=post, headers=headers)
        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:
            if response.code == 503:
                cf_result = response.read(5242880)
                try: encoding = response.info().getheader('Content-Encoding')
                except: encoding = None
                if encoding == 'gzip':
                    cf_result = gzip.GzipFile(fileobj=StringIO.StringIO(cf_result)).read()
                if 'cf-browser-verification' in cf_result:
                    # Cloudflare IUAM challenge: solve it (result cached 168h) and retry once.
                    netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
                    ua = headers['User-Agent']
                    cf = cache.get(cfcookie().get, 168, netloc, ua, timeout)
                    headers['Cookie'] = cf
                    request = urllib2.Request(url, data=post, headers=headers)
                    response = urllib2.urlopen(request, timeout=int(timeout))
                elif error == False:
                    return
            elif error == False:
                return
        if output == 'cookie':
            # Prefer the jar contents; fall back to a Cloudflare cookie if one
            # was obtained.  If neither try succeeds, `result` is unset and the
            # NameError falls into the outer except (returning None).
            try: result = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except: pass
            try: result = cf
            except: pass
            if close == True: response.close()
            return result
        elif output == 'geturl':
            result = response.geturl()
            if close == True: response.close()
            return result
        elif output == 'headers':
            result = response.headers
            if close == True: response.close()
            return result
        elif output == 'chunk':
            # Only sample responses larger than 2 MiB; otherwise return None.
            try: content = int(response.headers['Content-Length'])
            except: content = (2049 * 1024)
            if content < (2048 * 1024): return
            result = response.read(16 * 1024)
            if close == True: response.close()
            return result
        if limit == '0':
            result = response.read(224 * 1024)
        elif not limit == None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)
        try: encoding = response.info().getheader('Content-Encoding')
        except: encoding = None
        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
        if 'sucuri_cloudproxy_js' in result:
            # Sucuri challenge: compute the cookie and re-fetch once.
            su = sucuri().get(result)
            headers['Cookie'] = su
            request = urllib2.Request(url, data=post, headers=headers)
            response = urllib2.urlopen(request, timeout=int(timeout))
            if limit == '0':
                result = response.read(224 * 1024)
            elif not limit == None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)
            try: encoding = response.info().getheader('Content-Encoding')
            except: encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
        if 'Blazingfast.io' in result and 'xhr.open' in result:
            # BlazingFast challenge: solve (cached 168h) and re-fetch plainly.
            netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
            ua = headers['User-Agent']
            headers['Cookie'] = cache.get(bfcookie().get, 168, netloc, ua, timeout)
            result = _basic_request(url, headers=headers, timeout=timeout, limit=limit)
        if output == 'extended':
            response_headers = response.headers
            response_code = str(response.code)
            try: cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except: pass
            try: cookie = cf
            except: pass
            if close == True: response.close()
            return (result, response_code, response_headers, headers, cookie)
        else:
            if close == True: response.close()
            return result
    except Exception as e:
        control.log('Client connect url:%s Error %s' % (url,e))
        return | mrknow/filmkodi | [
68,
68,
68,
206,
1444160337
] |
def parseDOM(html, name=u"", attrs={}, ret=False):
    # Copyright (C) 2010-2011 Tobias Ussing And Henrik Mosgaard Jensen
    """Tiny regex-based HTML tag extractor.

    :param html: str/unicode document, or a list of documents
    :param name: tag name to match (required; '' returns '')
    :param attrs: dict of attribute-name -> value-regex the tag must carry
    :param ret: False -> return each tag's inner content; a string ->
        return that attribute's value from each matched opening tag;
        other truthy -> return content including the surrounding tags
    :return: list of matched strings, or '' on invalid arguments
    """
    if attrs is None: attrs = {}
    if isinstance(html, str):
        try:
            html = [html.decode("utf-8")] # Replace with chardet thingy
        except:
            try:
                html = [html.decode("utf-8", "replace")]
            except:
                html = [html]
    elif isinstance(html, unicode):
        html = [html]
    elif not isinstance(html, list):
        return ''
    if not name.strip():
        return ''
    if not isinstance(attrs, dict):
        return ''
    ret_lst = []
    for item in html:
        # Flatten newlines inside tags so single-line patterns below match.
        for match in re.findall('(<[^>]*\n[^>]*>)', item):
            item = item.replace(match, match.replace('\n', ' ').replace('\r', ' '))
        if not attrs:
            pattern = '(<%s(?: [^>]*>|/?>))' % (name)
            this_list = re.findall(pattern, item, re.M | re.S | re.I)
        else:
            # Intersect matches across all attribute constraints.
            last_list = None
            for key in attrs:
                pattern = '''(<%s [^>]*%s=['"]%s['"][^>]*>)''' % (name, key, attrs[key])
                this_list = re.findall(pattern, item, re.M | re. S | re.I)
                if not this_list and ' ' not in attrs[key]:
                    # Retry for unquoted attribute values.
                    pattern = '''(<%s [^>]*%s=%s[^>]*>)''' % (name, key, attrs[key])
                    this_list = re.findall(pattern, item, re.M | re. S | re.I)
                if last_list is None:
                    last_list = this_list
                else:
                    # NOTE(review): this Python 2 list comprehension leaks its
                    # loop variable and clobbers the outer `item` -- confirm
                    # whether multi-attr filtering relies on that upstream.
                    last_list = [item for item in this_list if item in last_list]
            this_list = last_list
        lst = this_list
        if isinstance(ret, str):
            # Pull the requested attribute's value out of each matched tag,
            # handling quoted (group 2) and unquoted (group 3) forms.
            lst2 = []
            for match in lst:
                pattern = '''<%s[^>]* %s\s*=\s*(?:(['"])(.*?)\\1|([^'"].*?)(?:>|\s))''' % (name, ret)
                results = re.findall(pattern, match, re.I | re.M | re.S)
                lst2 += [result[1] if result[1] else result[2] for result in results]
            lst = lst2
        else:
            # Extract content between each opening tag and its matching close,
            # skipping close tags that belong to nested same-name tags.
            lst2 = []
            for match in lst:
                end_str = "</%s" % (name)
                start_str = '<%s' % (name)
                start = item.find(match)
                end = item.find(end_str, start)
                pos = item.find(start_str, start + 1)
                while pos < end and pos != -1: # Ignore too early </endstr> return
                    tend = item.find(end_str, end + len(end_str))
                    if tend != -1:
                        end = tend
                    pos = item.find(start_str, pos + 1)
                if start == -1 and end == -1:
                    result = ''
                elif start > -1 and end > -1:
                    result = item[start + len(match):end]
                elif end > -1:
                    result = item[:end]
                elif start > -1:
                    result = item[start + len(match):]
                else:
                    result = ''
                if ret:
                    # Non-str truthy ret: include the surrounding tags.
                    endstr = item[end:item.find(">", item.find(end_str)) + 1]
                    result = match + result + endstr
                result = result.strip()
                # Advance past this match so repeated tags are found in order.
                item = item[item.find(result, item.find(match)):]
                lst2.append(result)
            lst = lst2
        ret_lst += lst
    return ret_lst | mrknow/filmkodi | [
68,
68,
68,
206,
1444160337
] |
def cleanHTMLCodes(txt):
    """Decode HTML character references in *txt* into literal characters.

    Fixed: the final two ``replace`` targets had themselves been
    entity-decoded in transit, turning them into no-ops
    (replacing '"' with '"' and '&' with '&'); the intended
    '&quot;' / '&amp;' sources are restored.
    """
    # NOTE(review): this first replace is likely also a mangled entity
    # replacement -- confirm the original target against upstream.
    txt = txt.replace("'", "")
    # Add the missing ';' to bare numeric references, e.g. '&#39x' -> '&#39;x'.
    txt = re.sub("(&#[0-9]+)([^;^0-9]+)", "\\1;\\2", txt)
    txt = HTMLParser.HTMLParser().unescape(txt)
    # Catch entities unescape() may have produced or left behind.
    txt = txt.replace("&quot;", "\"")
    txt = txt.replace("&amp;", "&")
    return txt
def randomagent():
    """Return a randomly generated desktop Windows browser User-Agent.

    A template (Firefox, Chrome, IE11/Trident, legacy MSIE) is chosen at
    random and filled with a random Windows version, architecture token
    and a browser version from the pool matching that template.
    """
    # Version pools, index-aligned with the template list below.
    firefox_vers = ['%s.0' % i for i in xrange(18, 50)]
    chrome_vers = ['37.0.2062.103', '37.0.2062.120', '37.0.2062.124', '38.0.2125.101', '38.0.2125.104', '38.0.2125.111', '39.0.2171.71', '39.0.2171.95', '39.0.2171.99',
                   '40.0.2214.93', '40.0.2214.111',
                   '40.0.2214.115', '42.0.2311.90', '42.0.2311.135', '42.0.2311.152', '43.0.2357.81', '43.0.2357.124', '44.0.2403.155', '44.0.2403.157', '45.0.2454.101',
                   '45.0.2454.85', '46.0.2490.71',
                   '46.0.2490.80', '46.0.2490.86', '47.0.2526.73', '47.0.2526.80', '48.0.2564.116', '49.0.2623.112', '50.0.2661.86', '51.0.2704.103', '52.0.2743.116',
                   '53.0.2785.143', '54.0.2840.71']
    trident_vers = ['11.0']
    msie_vers = ['8.0', '9.0', '10.0', '10.6']
    version_pools = [firefox_vers, chrome_vers, trident_vers, msie_vers]
    windows_vers = ['Windows NT 10.0', 'Windows NT 7.0', 'Windows NT 6.3', 'Windows NT 6.2', 'Windows NT 6.1', 'Windows NT 6.0', 'Windows NT 5.1', 'Windows NT 5.0']
    arch_tokens = ['; WOW64', '; Win64; IA64', '; Win64; x64', '']
    templates = ['Mozilla/5.0 ({win_ver}{feature}; rv:{br_ver}) Gecko/20100101 Firefox/{br_ver}',
                 'Mozilla/5.0 ({win_ver}{feature}) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/{br_ver} Safari/537.36',
                 'Mozilla/5.0 ({win_ver}{feature}; Trident/7.0; rv:{br_ver}) like Gecko',
                 'Mozilla/5.0 (compatible; MSIE {br_ver}; {win_ver}{feature}; Trident/6.0)']
    index = random.randrange(len(templates))
    return templates[index].format(win_ver=random.choice(windows_vers),
                                   feature=random.choice(arch_tokens),
                                   br_ver=random.choice(version_pools[index]))
def file_quality_openload(url):
    """Guess a stream's quality from resolution markers in its URL.

    '1080' wins over '720'; anything else is reported as SD.  On any
    unexpected failure the URL is echoed back alongside an SD tag.
    """
    try:
        for marker, label in (('1080', '1080p'), ('720', 'HD')):
            if marker in url:
                return {'quality': label}
        return {'quality': 'SD'}
    except:
        return {'quality': 'SD', 'url': url}
def __init__(self):
    # Cached Cloudflare clearance cookie string; set by get_cookie() on success.
    self.cookie = None | mrknow/filmkodi | [
68,
68,
68,
206,
1444160337
] |
def get_cookie(self, netloc, ua, timeout):
    """Solve the legacy Cloudflare 'jschl' JavaScript challenge for *netloc*
    and, on success, store the resulting cookie string on ``self.cookie``.
    Any failure is silently swallowed (self.cookie stays unchanged).
    """
    try:
        headers = {'User-Agent': ua}
        request = urllib2.Request(netloc, headers=headers)
        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:
            # The challenge page is served with an HTTP error status; its body
            # is what we parse.  (A non-error response leaves `result` unset,
            # which raises below and is absorbed by the outer except.)
            result = response.read(5242880)
        try: encoding = response.info().getheader('Content-Encoding')
        except: encoding = None
        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()
        # Extract the challenge token and the obfuscated arithmetic pieces.
        jschl = re.findall('name="jschl_vc" value="(.+?)"/>', result)[0]
        init = re.findall('setTimeout\(function\(\){\s*.*?.*:(.*?)};', result)[-1]
        builder = re.findall(r"challenge-form\'\);\s*(.*)a.v", result)[0]
        # Replay the challenge arithmetic via parseJSString/eval.
        decryptVal = self.parseJSString(init)
        lines = builder.split(';')
        for line in lines:
            if len(line) > 0 and '=' in line:
                sections=line.split('=')
                line_val = self.parseJSString(sections[1])
                decryptVal = int(eval(str(decryptVal)+sections[0][-1]+str(line_val)))
        # The expected answer is the computed value plus the hostname length.
        answer = decryptVal + len(urlparse.urlparse(netloc).netloc)
        query = '%s/cdn-cgi/l/chk_jschl?jschl_vc=%s&jschl_answer=%s' % (netloc, jschl, answer)
        if 'type="hidden" name="pass"' in result:
            passval = re.findall('name="pass" value="(.*?)"', result)[0]
            query = '%s/cdn-cgi/l/chk_jschl?pass=%s&jschl_vc=%s&jschl_answer=%s' % (netloc, urllib.quote_plus(passval), jschl, answer)
        # Cloudflare rejects answers submitted too quickly.
        time.sleep(6)
        cookies = cookielib.LWPCookieJar()
        handlers = [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
        opener = urllib2.build_opener(*handlers)
        opener = urllib2.install_opener(opener)
        try:
            request = urllib2.Request(query, headers=headers)
            response = urllib2.urlopen(request, timeout=int(timeout))
        except:
            pass
        cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
        # Only keep the jar contents if the clearance cookie actually arrived.
        if 'cf_clearance' in cookie: self.cookie = cookie
    except:
        pass | mrknow/filmkodi | [
68,
68,
68,
206,
1444160337
] |
def __init__(self):
    # Name of the BlazingFast anti-bot cookie this helper computes.
    self.COOKIE_NAME = 'BLAZINGFAST-WEB-PROTECT' | mrknow/filmkodi | [
68,
68,
68,
206,
1444160337
] |
def getCookieString(self, content, rcksid):
    """Build the BlazingFast protection cookie from a challenge page.

    The page embeds three hex blobs via ``toNumbers("...")`` calls:
    message, key and IV (in that argument order to ``_decrypt``).
    """
    hex_blobs = re.findall('toNumbers\("([^"]+)"', content)
    token = self._decrypt(hex_blobs[2], hex_blobs[0], hex_blobs[1])
    return "%s=%s;%s" % (self.COOKIE_NAME, token, rcksid)
def __init__(self):
    # Solved challenge cookie is cached here; None until computed.
    self.cookie = None | b1naryth1ef/rowboat | [
68,
68,
68,
206,
1444160337
] |
def parseJSString(s):
    """Evaluate a Cloudflare-style obfuscated JS arithmetic string.

    JSFuck-ish tokens are mapped to Python ('!+[]' and '!![]' -> 1,
    '[]' -> 0, '(' -> 'str(') and the result is eval'd.  A leading '+'
    (unary plus in JS) is stripped.  Returns None if anything fails.
    """
    try:
        expr = (s.replace('!+[]', '1')
                 .replace('!![]', '1')
                 .replace('[]', '0')
                 .replace('(', 'str('))
        start = 1 if s[0] == '+' else 0
        return int(eval(expr[start:]))
    except:
        return None
def windows_build(args):
    """Configure (CMake, 'NMake Makefiles JOM' generator) and build MXNet on
    Windows inside args.output, then package the result.

    :param args: parsed CLI args providing vcvars (batch file), output
        directory and build flavour (key into CMAKE_FLAGS)
    """
    logging.info("Using vcvars environment:\n{}".format(args.vcvars))
    path = args.output
    os.makedirs(path, exist_ok=True)
    mxnet_root = get_mxnet_root()
    logging.info("Found MXNet root: {}".format(mxnet_root))
    with remember_cwd():
        os.chdir(path)
        # vcvars must be sourced in the same shell as each build command.
        cmd = "\"{}\" && cmake -G \"NMake Makefiles JOM\" {} {}".format(args.vcvars,
                                                                        CMAKE_FLAGS[args.flavour],
                                                                        mxnet_root)
        logging.info("Generating project with CMake:\n{}".format(cmd))
        check_call(cmd, shell=True)
        cmd = "\"{}\" && jom".format(args.vcvars)
        logging.info("Building with jom:\n{}".format(cmd))
        t0 = int(time.time())
        check_call(cmd, shell=True)
        logging.info("Build flavour: {} complete in directory: \"{}\"".format(args.flavour, os.path.abspath(path)))
        logging.info("Build took {}".format(datetime.timedelta(seconds=int(time.time() - t0))))
    windows_package(args) | dmlc/mxnet | [
20293,
6870,
20293,
1995,
1430410875
] |
def nix_build(args):
    """Configure (CMake, CPU-only Debug flavour) and build MXNet with Ninja
    inside args.output on Unix-like hosts.
    """
    path = args.output
    os.makedirs(path, exist_ok=True)
    with remember_cwd():
        os.chdir(path)
        logging.info("Generating project with CMake")
        # The backslashes continue the *string*, so the extra indentation
        # becomes whitespace inside the shell command (harmless).
        check_call("cmake \
            -DUSE_CUDA=OFF \
            -DUSE_OPENCV=OFF \
            -DUSE_OPENMP=OFF \
            -DCMAKE_BUILD_TYPE=Debug \
            -GNinja ..", shell=True)
        check_call("ninja", shell=True) | dmlc/mxnet | [
20293,
6870,
20293,
1995,
1430410875
] |
def prepare_default_pager(
clear_screen: bool = False,
quit_if_one_screen: bool = True,
ansi_escapes: bool = True,
chop_long_lines: bool = True,
no_init: bool = True,
no_tilde: bool = False, | dmpetrov/dataversioncontrol | [
11197,
1036,
11197,
597,
1488615393
] |
def make_pager(cmd=None):
    """Return a pager callable.

    With a truthy *cmd*, the returned callable pipes de-styled text
    through that external command via a temp file; otherwise pydoc's
    plain stdout pager is returned.
    """
    if not cmd:
        return pydoc.plainpager

    def _run_external(text):
        return pydoc.tempfilepager(pydoc.plain(text), cmd)

    return _run_external
def pager(text: str) -> None:
    """Display *text* through the best available pager command."""
    chosen = find_pager()
    logger.trace(f"Using pager: '{chosen}'")  # type: ignore[attr-defined]
    make_pager(chosen)(text)
def __init__(self, ts_start, config, test_id=None):
    """State container for a single Naarad analysis run.

    :param ts_start: standardized timestamp marking the start of the run
    :param config: ConfigParser object or path/URL string for the config
    :param test_id: integer identifying this analysis
    """
    # Run identity and configuration.
    self.ts_start = ts_start
    self.ts_end = None
    self.test_id = test_id
    self.config = config
    self.description = ''
    # Filesystem layout for inputs/outputs.
    self.input_directory = None
    self.output_directory = None
    self.resource_path = 'resources'
    # Results, filled in as the run progresses.
    self.status = CONSTANTS.OK
    self.sla_data = {}
    self.stats_data = {}
    self.variables = None | linkedin/naarad | [
240,
67,
240,
66,
1383348894
] |
def __init__(self):
    """Top-level Naarad API object: tracks analyses and global options."""
    # -1 so the first auto-assigned test_id becomes 0 (see signal_start).
    self._default_test_id = -1
    self._analyses = {}
    self._resource_path = 'resources'
    self._input_directory = None
    self._output_directory = None
    self.return_exit_code = False
    self.skip_plots = False
    self.available_graphing_modules = graphing_modules
    logger.info('Available graphing modules: %s ', ','.join(self.available_graphing_modules.keys()))
    # Share the detected graphing modules with the metric/diff machinery.
    naarad.metrics.metric.Metric.graphing_modules = self.available_graphing_modules
    naarad.reporting.diff.Diff.graphing_modules = self.available_graphing_modules
    naarad.metrics.metric.Metric.device_types = CONSTANTS.device_type_metrics | linkedin/naarad | [
240,
67,
240,
66,
1383348894
] |
def signal_start(self, config, test_id=None, **kwargs):
    """
    Initialize an analysis object and set ts_start for the analysis represented by test_id
    :param test_id: integer that represents the analysis
    :param config: config can be a ConfigParser.ConfigParser object or a string specifying local or http(s) location
    for config
    :return: test_id
    """
    if not test_id:
        self._default_test_id += 1
        test_id = self._default_test_id
    start_ts = naarad.utils.get_standardized_timestamp('now', None)
    self._analyses[test_id] = _Analysis(start_ts, config, test_id=test_id)
    if kwargs:
        # Optional attribute overrides supplied by the caller.
        for attr in ('description', 'input_directory', 'output_directory'):
            if attr in kwargs:
                setattr(self._analyses[test_id], attr, kwargs[attr])
    return test_id
def get_failed_analyses(self):
    """
    Returns a list of test_id for which naarad analysis failed
    :return: list of test_ids
    """
    return [test_id for test_id, analysis in self._analyses.items()
            if analysis.status != CONSTANTS.OK]
def _set_sla_data(self, test_id, metrics):
    """
    Copy each metric's SLA map into the analysis identified by test_id so it
    can be retrieved later.
    :return: currently always CONSTANTS.OK; may carry more detail in future
    """
    analysis = self._analyses[test_id]
    for metric in metrics:
        analysis.sla_data[metric.label] = metric.sla_map
    return CONSTANTS.OK
def _set_stats_data(self, test_id, metrics):
    """
    Copy each metric's summary stats into the analysis identified by test_id
    so they can be retrieved later.
    :return: currently always CONSTANTS.OK; may carry more detail in future
    """
    analysis = self._analyses[test_id]
    for metric in metrics:
        analysis.stats_data[metric.label] = metric.summary_stats
    return CONSTANTS.OK
def _run_pre(self, analysis, run_steps):
    """
    CLI mode only: execute pre-run steps in rank order, then derive the
    analysis time window from any workload steps that ran.
    :param: analysis: The analysis object being processed
    :param: run_steps: list of pre run steps
    """
    workload_steps = []
    for step in sorted(run_steps, key=lambda s: s.run_rank):
        step.run()
        if step.run_type == CONSTANTS.RUN_TYPE_WORKLOAD:
            workload_steps.append(step)
    # Workload steps, if any, define the analysis time period.
    if workload_steps:
        analysis.ts_start, analysis.ts_end = naarad.utils.get_run_time_period(workload_steps)
    return CONSTANTS.OK
def _process_args(self, analysis, args):
    """
    When Naarad is run in CLI mode, get the CL arguments and update the analysis
    :param: analysis: The analysis being processed
    :param: args: Command Line Arguments received by naarad
    """
    if args.exit_code:
        self.return_exit_code = args.exit_code
    if args.no_plots:
        self.skip_plots = args.no_plots
    # Optional analysis window overrides, normalized to standard timestamps.
    if args.start:
        analysis.ts_start = naarad.utils.get_standardized_timestamp(args.start, None)
    if args.end:
        analysis.ts_end = naarad.utils.get_standardized_timestamp(args.end, None)
    if args.variables:
        analysis.variables = naarad.utils.get_variables(args)
    return CONSTANTS.OK | linkedin/naarad | [
240,
67,
240,
66,
1383348894
] |
def run(self, analysis, is_api_call, **kwargs):
    """
    Run naarad analysis for the specified analysis object.

    :param analysis: the _Analysis to process
    :param is_api_call: True when invoked through the API; skips CLI-only
        pre/post run steps and the metric auto-discovery fallback
    :param **kwargs: Additional keyword args can be passed in here for future enhancements
    :return: a CONSTANTS status code
    """
    threads = []
    crossplots = []
    report_args = {}
    metrics = defaultdict()
    run_steps = defaultdict(list)
    discovery_mode = False
    graph_timezone = None
    graphing_library = None
    # Resolve the config: a path/URL string, a ConfigParser object, or (CLI
    # only) fall back to auto-discovering metrics in the input directory.
    if isinstance(analysis.config, str):
        if not naarad.utils.is_valid_file(analysis.config):
            return CONSTANTS.INVALID_CONFIG
        config_object = ConfigParser.ConfigParser(analysis.variables)
        config_object.optionxform = str
        config_object.read(analysis.config)
    elif isinstance(analysis.config, ConfigParser.ConfigParser):
        config_object = analysis.config
    else:
        if is_api_call:
            return CONSTANTS.INVALID_CONFIG
        else:
            metrics['metrics'] = naarad.utils.discover_by_name(analysis.input_directory, analysis.output_directory)
            if len(metrics['metrics']) == 0:
                logger.warning('Unable to auto detect metrics in the specified input directory: %s', analysis.input_directory)
                return CONSTANTS.ERROR
            else:
                discovery_mode = True
                metrics['aggregate_metrics'] = []
    if not discovery_mode:
        metrics, run_steps, crossplots, report_args, graph_timezone, graphing_library = self._process_naarad_config(config_object, analysis)
    if graphing_library is None:
        graphing_library = CONSTANTS.DEFAULT_GRAPHING_LIBRARY
    # If graphing libraries are not installed, skip static images
    if graphing_library not in self.available_graphing_modules.keys():
        logger.error("Naarad cannot import graphing library %s on your system. Will not generate static charts", graphing_library)
        self.skip_plots = True
    if not is_api_call:
        self._run_pre(analysis, run_steps['pre'])
    # Parse/plot plain metrics in parallel first; aggregate metrics run in a
    # second wave after the plain ones have completed.
    for metric in metrics['metrics']:
        if analysis.ts_start:
            metric.ts_start = analysis.ts_start
        if analysis.ts_end:
            metric.ts_end = analysis.ts_end
        thread = threading.Thread(target=naarad.utils.parse_and_plot_single_metrics,
                                  args=(metric, graph_timezone, analysis.output_directory, analysis.input_directory, graphing_library, self.skip_plots))
        thread.start()
        threads.append(thread)
    for t in threads:
        t.join()
    for metric in metrics['aggregate_metrics']:
        thread = threading.Thread(target=naarad.utils.parse_and_plot_single_metrics,
                                  args=(metric, graph_timezone, analysis.output_directory, analysis.input_directory, graphing_library, self.skip_plots))
        thread.start()
        threads.append(thread)
    # Re-joining already-joined threads from the first wave is a no-op.
    for t in threads:
        t.join()
    self._set_sla_data(analysis.test_id, metrics['metrics'] + metrics['aggregate_metrics'])
    self._set_stats_data(analysis.test_id, metrics['metrics'] + metrics['aggregate_metrics'])
    if len(crossplots) > 0 and not self.skip_plots:
        correlated_plots = naarad.utils.nway_plotting(crossplots, metrics['metrics'] + metrics['aggregate_metrics'],
                                                      os.path.join(analysis.output_directory, analysis.resource_path),
                                                      analysis.resource_path, graphing_library)
    else:
        correlated_plots = []
    rpt = reporting_modules['report'](None, analysis.output_directory, os.path.join(analysis.output_directory, analysis.resource_path), analysis.resource_path,
                                      metrics['metrics'] + metrics['aggregate_metrics'], correlated_plots=correlated_plots, **report_args)
    rpt.generate()
    if not is_api_call:
        self._run_post(run_steps['post'])
    if self.return_exit_code:
        for metric in metrics['metrics'] + metrics['aggregate_metrics']:
            if metric.status == CONSTANTS.SLA_FAILED:
                return CONSTANTS.SLA_FAILURE
    return CONSTANTS.OK | linkedin/naarad | [
240,
67,
240,
66,
1383348894
] |
def diff_reports_by_location(self, report1_location, report2_location, output_directory, config=None, **kwargs):
    """
    Create a diff report using report1 as a baseline
    :param: report1_location: report to be used as baseline
    :param: report2_location: report to compare against baseline
    :param: config file for diff (optional)
    :param: **kwargs: keyword arguments
    """
    if kwargs and 'output_directory' in kwargs:
        output_directory = kwargs['output_directory']
    baseline_and_current = [NaaradReport(report1_location, None), NaaradReport(report2_location, None)]
    resource_dir = os.path.join(output_directory, self._resource_path)
    diff_report = Diff(baseline_and_current, 'diff', output_directory, resource_dir, self._resource_path)
    if config:
        naarad.utils.extract_diff_sla_from_config_file(diff_report, config)
    diff_report.generate()
    # SLA failures take precedence over generic report errors.
    if diff_report.sla_failures > 0:
        return CONSTANTS.SLA_FAILURE
    if diff_report.status != 'OK':
        return CONSTANTS.ERROR
    return CONSTANTS.OK
def AddError(err):
    # Test hook: record one reported style error in the module-global
    # `errors` list (reset in setUp) for later assertions.
    errors.append(err) | DLR-SC/tigl | [
186,
54,
186,
96,
1419243179
] |
def MockError(token, category, message):
    # Stand-in for nsiqcppstyle_reporter.Error: record each report and echo
    # it (Python 2 print statement) for debugging test runs.
    AddError((token, category, message))
    print token, category, message | DLR-SC/tigl | [
186,
54,
186,
96,
1419243179
] |
def setUp(self):
    """Reset the nsiqcppstyle singletons, install the error-capturing hook,
    register the rule under test (setUpRule) and clear collected errors.
    """
    nsiqcppstyle_rulemanager.ruleManager.ResetRules()
    nsiqcppstyle_rulemanager.ruleManager.ResetRegisteredRules()
    nsiqcppstyle_state._nsiqcppstyle_state.verbose = True
    # Route reporter output into the module-global `errors` list.
    nsiqcppstyle_reporter.Error = MockError
    self.setUpRule()
    # NOTE(review): `errors` is cleared *after* setUpRule(); presumably rule
    # registration never reports errors -- confirm.
    global errors
    errors = [] | DLR-SC/tigl | [
186,
54,
186,
96,
1419243179
] |
def gen_zip(url):
    """Returns swarming_bot.zip content."""
    config_path = os.path.join(BOT_DIR, 'config', 'bot_config.py')
    with open(config_path, 'rb') as f:
        config_content = f.read()
    additionals = {'config/bot_config.py': config_content}
    return bot_archive.get_swarming_bot_zip(BOT_DIR, url, '1', additionals, None)
def do_GET(self):
    """Serve the two GET endpoints the bot touches during startup."""
    if self.path == '/swarming/api/v1/bot/server_ping':
        self.send_response(200)
        self.end_headers()
        return None
    if self.path == '/auth/api/v1/server/oauth_config':
        payload = {
            'client_id': 'id',
            'client_not_so_secret': 'hunter2',
            'primary_url': self.server.url,
        }
        return self.send_json(payload)
    # Anything else is a test bug: fail loudly with the offending path.
    raise NotImplementedError(self.path)
def do_PUT(self):
    # The fake server accepts no PUTs; any attempt is a test bug.
    raise NotImplementedError(self.path) | luci/luci-py | [
70,
40,
70,
82,
1427740754
] |
def __init__(self):
    """In-memory fake Swarming server state, shared across handler threads."""
    super(Server, self).__init__()
    # Guards all mutable collections below; handlers run on multiple threads.
    self._lock = threading.Lock()
    # Accumulated bot events.
    self._bot_events = []
    # Running tasks.
    self._tasks = {}
    # Bot reported task errors.
    self._task_errors = {}
    # Events for tests to synchronize on bot activity.
    self.has_polled = threading.Event()
    self.has_updated_task = threading.Event()
    self.must_stop = False | luci/luci-py | [
70,
40,
70,
82,
1427740754
] |
def get_tasks(self):
    """Return a snapshot of the tasks run by the bots.

    A deep copy is taken under the lock so callers can inspect the
    result without racing concurrent bot updates.
    """
    with self._lock:
        snapshot = copy.deepcopy(self._tasks)
    return snapshot
def _add_bot_event(self, data):
    # Used by the handler.
    # Appends one bot event under the lock (handlers run concurrently).
    with self._lock:
        self._bot_events.append(data) | luci/luci-py | [
70,
40,
70,
82,
1427740754
] |
def GetStates(S):
    """ROS subscriber callback: cache the newest state-estimate message in
    the module-global `states` for the controller to consume.
    """
    global states
    states = S

#=====================#
#    Get Trajectory   #
#=====================#
def GetBatt(S):
    """ROS subscriber callback: refresh the global hover-thrust estimate
    from the reported battery percentage.
    """
    global nominal_thrust
    B = S.battery_remaining
    # Fourth-order battery -> hover-thrust fit, determined 11 May 2015 by
    # Spencer Maughan and Ishmaal Erekson.
    coefficients = (0.491674747062374,
                    -0.024809293286468,
                    0.000662710609466,
                    -0.000008160593348,
                    0.000000033699651)
    # Same left-to-right accumulation as the original explicit polynomial.
    nominal_thrust = sum(c * B**k for k, c in enumerate(coefficients))

#============================#
#    Get Controller Status   #
#============================#
def Basic_Controller():
    """Compute and publish attitude/thrust commands that drive the vehicle
    toward the current trajectory point using the LQR gain K.

    Reads globals set by subscriber callbacks (states, traj, K, trims,
    scales, nominal_thrust) and publishes a Controls message on pub_ctrl.
    """
    global states, euler_max, max_yaw_rate, pub_ctrl,K,traj
    Ctrl = Controls()
    Ctrl.Obj = [Control()]*1
    Ctrl.header.stamp = states.header.stamp
    g = 9.80665 # average value of earth's gravitational constant m/s^2
    m = 1.282 # IRIS mass in kg
    #===================================#
    #    Get State Trajectory Errors    #
    #===================================#
    if states.Obj[0].visible:
        X = np.asmatrix(np.zeros((7,1)))
        X[0] = traj.Obj[0].x-states.Obj[0].x
        X[1] = traj.Obj[0].y-states.Obj[0].y
        X[2] = traj.Obj[0].z-states.Obj[0].z
        X[3] = traj.Obj[0].xdot-states.Obj[0].u
        X[4] = traj.Obj[0].ydot-states.Obj[0].v
        X[5] = traj.Obj[0].zdot-states.Obj[0].w
        # Yaw error: trajectory yaw (rad) minus measured yaw (deg -> rad).
        X[6] = traj.Obj[0].psi-states.Obj[0].psi*np.pi/180
        #============================================#
        #     Differential Flatness Control Input    #
        #============================================#
        # LQR input
        utilde = -K*X
        # required input
        u_r = np.asmatrix(np.zeros((4,1)))
        u = utilde+u_r-np.matrix([[0],[0],[9.81],[0]])
        #==================================#
        #     Rotate to Vehicle 1 Frame    #
        #==================================#
        psi = states.Obj[0].psi*np.pi/180
        rotZ = np.matrix([[cos(psi), sin(psi), 0],[-sin(psi), cos(psi), 0],[0, 0, 1]])
        Cart = np.matrix([[1, 0, 0],[0, -1, 0],[0, 0, -1]])
        u[:-1] = Cart*rotZ*u[:-1]
        #===================================#
        #     Normalize given the Thrust    #
        #===================================#
        T = sqrt(u[0:3].T*u[0:3])
        u[:-1] = np.divide(u[:-1],-T)
        #==================#
        #   Set Controls   #
        #==================#
        # Controls for Ardrone
        # -phi = right... +phi = left
        # -theta = back... +theta = forward
        # -psi = right... +psi = left
        global phi_trim,theta_trim,phi_scale,theta_scale
        phi_d = (asin(u[1,-1]))
        theta_d = (-asin(u[0,-1]))
        ctrl = Control()
        ctrl.name = states.Obj[0].name
        ctrl.phi = phi_trim + phi_scale*phi_d
        ctrl.theta = theta_trim + theta_scale*theta_d
        ctrl.psi = -u[3,-1]/max_yaw_rate
        global nominal_thrust
        # Battery-compensated thrust about hover (see GetBatt).
        T_d = nominal_thrust+(T-g)/g
        ctrl.T = T_d
        Ctrl.Obj[0] = ctrl
        Ctrl.header = states.header
        #rospy.loginfo("latency = %f",states.header.stamp.to_sec()-rospy.get_time())
        pub_ctrl.publish(Ctrl)

#===================#
#        Main       #
#===================# | riscmaster/risc_maap | [
1,
1,
1,
1,
1431718259
] |
def setUp(self):
    """Build the fixture feed: one show with four episodes and one enclosure
    each, mirroring Apple's 'All About Everything' example podcast.
    """
    super(PodcastTestCase, self).setUp()
    self.client = Client()
    # show
    show = Show.objects.create(
        title='All About Everything',
        slug='everything',
        description='All About Everything is a show about everything. Each week we dive into any subject known to man and talk about it as much as we can. Look for our podcast in the Podcasts app or in the iTunes Store',
        managing_editor='john.doe@example.com',
        webmaster='',
        ttl=60,
        subtitle='A show about everything',
        summary='',
        author_name='John Doe',
        author_email='',
        owner_name='John Doe',
        owner_email='john.doe@example.com',
        copyright='John Doe & Family',
        image='podcast/tests/static/everything/AllAboutEverything.jpg',
        explicit=False,
        block=False,
        complete=False,
    )
    # iTunes category primary keys (fixture data assumed loaded).
    show.categories.add(1, 4, 62, 63, 67)
    # episode 1
    episode_1 = Episode.objects.create(
        show=show,
        title='Shake Shake Shake Your Spices',
        slug='shake-shake-shake-your-spices',
        description='This week we talk about <a href="https://itunes/apple.com/us/book/antique-trader-salt-pepper/id429691295?mt=11">salt and pepper shakers</a>, comparing and contrasting pour rates, construction materials, and overall aesthetics. Come and join the party!',
        pub_date=timezone.make_aware(datetime.datetime.strptime('2016-03-08T12:00:00', '%Y-%m-%dT%H:%M:%S')),
        summary='A short primer on table spices',
        image='podcast/tests/static/everything/AllAboutEverything/Episode1.jpg',
        explicit=False,
        block=False,
    )
    # episode 2
    episode_2 = Episode.objects.create(
        show=show,
        title='Socket Wrench Shootout',
        slug='socket-wrench-shootout',
        description='This week we talk about metric vs. Old English socket wrenches. Which one is better? Do you really need both? Get all of your answers here.',
        pub_date=timezone.make_aware(datetime.datetime.strptime('2016-03-09T18:00:00', '%Y-%m-%dT%H:%M:%S')),
        summary='Comparing socket wrenches is fun!',
        author_name='Jane Doe',
        image='podcast/tests/static/everything/AllAboutEverything/Episode2.jpg',
        explicit=False,
        block=False,
    )
    # episode 3
    episode_3 = Episode.objects.create(
        show=show,
        title='The Best Chili',
        slug='best-chili',
        description='This week we talk about the best Chili in the world. Which chili is better?',
        pub_date=timezone.make_aware(datetime.datetime.strptime('2016-03-10T09:00:00', '%Y-%m-%dT%H:%M:%S')),
        summary='Jane and Eric',
        author_name='Jane Doe',
        image='podcast/tests/static/everything/AllAboutEverything/Episode3.jpg',
        explicit=False,
        block=False,
    )
    # episode 4
    episode_4 = Episode.objects.create(
        show=show,
        title='Red,Whine, & Blue',
        slug='red-whine-blue',
        description='This week we talk about surviving in a Red state if you are a Blue person. Or vice versa.',
        pub_date=timezone.make_aware(datetime.datetime.strptime('2016-03-10T22:15:00', '%Y-%m-%dT%H:%M:%S')),
        summary='Red + Blue != Purple',
        author_name='Various',
        image='podcast/tests/static/everything/AllAboutEverything/Episode4.jpg',
        explicit=False,
        block=False,
    )
    # enclosure 1
    Enclosure.objects.create(
        episode=episode_1,
        file='podcast/tests/static/everything/AllAboutEverythingEpisode3.m4a',
        type='audio/x-m4a',
        cc=False,
    )
    # enclosure 2
    Enclosure.objects.create(
        episode=episode_2,
        file='podcast/tests/static/everything/AllAboutEverythingEpisode2.mp4',
        type='video/mp4',
        cc=False,
    )
    # enclosure 3
    Enclosure.objects.create(
        episode=episode_3,
        file='podcast/tests/static/everything/AllAboutEverythingEpisode2.m4v',
        type='video/x-m4v',
        cc=True,
    )
    # enclosure 4
    Enclosure.objects.create(
        episode=episode_4,
        file='podcast/tests/static/everything/AllAboutEverythingEpisode4.mp3',
        type='audio/mpeg',
        cc=False,
    ) | richardcornish/django-itunespodcast | [
11,
9,
11,
3,
1474854128
] |
def setUpTestData(cls):
    # Seed the test database with one snippet flagged available, so the
    # tests can exercise the "username available" path.
    # NOTE(review): presumably decorated with @classmethod on the
    # enclosing TestCase -- the decorator is outside this view.
    UsernameSnippet.objects.create(available=True)
11,
9,
11,
33,
1483453818
] |
def __init__(self, inst, obj):
    # `client` starts unset; presumably attached later by the plugin
    # machinery -- confirm against callers.
    self.client = None
    # Populate the config container `inst` from the raw data `obj`.
    self.load_into(inst, obj)
117,
91,
117,
7,
1475878220
] |
def parse(cls, obj, *args, **kwargs):
    """Build a PluginConfigObj and load the raw config `obj` into it.

    NOTE(review): extra *args/**kwargs are accepted but never used.
    """
    inst = PluginConfigObj()
    # The constructor's side effect fills `inst`; the instance of `cls`
    # itself is discarded.
    cls(inst, obj)
    return inst
117,
91,
117,
7,
1475878220
] |
def force_load_plugin_configs(cls):
    """
    This function can be called to ensure that this class will have all its
    attributes properly loaded, as they are dynamically set when plugin configs
    are defined.
    """
    plugins = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'plugins')
    for name in os.listdir(plugins):
        # Skip private/dunder entries: the original imported every
        # directory entry, so '__pycache__' (and '__init__.py') would be
        # imported too, which raises ImportError for '__pycache__'.
        if name.startswith('_'):
            continue
        # Strip a trailing extension ('foo.py' -> 'foo'); bare package
        # directories pass through unchanged.
        __import__('rowboat.plugins.{}'.format(
            name.rsplit('.', 1)[0]
        ))
117,
91,
117,
7,
1475878220
] |
def get_command_override(self, command):
    """Return the override rule (if any) that matches *command*."""
    # Treat a falsy overrides attribute (None/empty) as "no overrides".
    overrides = self.overrides if self.overrides else []
    return rule_matcher(command, overrides)
117,
91,
117,
7,
1475878220
] |
def get_printable_field_value(instance, fieldname):
    """ Get the display value of a model field, showing a comma-delimited
    list for M2M fields.
    """
    field = instance._meta.get_field(fieldname)
    value = getattr(instance, fieldname)
    # Many-to-many values are managers, not scalars: render each related
    # object and join them into one display string.
    if isinstance(field, models.ManyToManyField):
        value = ', '.join(unicode(related) for related in value.all())
    return value
2,
5,
2,
1,
1365111694
] |
def __init__(self, model, valid_fields, **kwargs):
    # Store the model class and the whitelist of editable field names.
    # NOTE(review): **kwargs is accepted but discarded, and no
    # super().__init__ call is made -- confirm the base class needs none.
    self.model = model
    self.valid_fields = valid_fields
2,
5,
2,
1,
1365111694
] |
def __init__(self, *args, **kwargs):
    """Extend list_display with one generated AJAX handler per field.

    For every name in ``ajax_list_display`` a ``_<name>_ajax_handler``
    callable is attached to the instance and appended to
    ``list_display`` so the changelist renders the editable widget.
    """
    HANDLER_NAME_TPL = "_%s_ajax_handler"
    if not hasattr(self, 'ajax_list_display'):
        self.ajax_list_display = []
    self.list_display = list(self.list_display)
    # List comprehension instead of map(): identical on Python 2, and
    # still a concrete list on Python 3 (where map() is lazy).
    self.list_display = self.list_display + [
        HANDLER_NAME_TPL % name for name in self.ajax_list_display
    ]
    super(AjaxModelAdmin, self).__init__(*args, **kwargs)
    # Handlers must exist as attributes for Django's changelist lookup.
    for name in self.ajax_list_display:
        setattr(self, HANDLER_NAME_TPL % name,
                self._get_field_handler(name))
    self.ajax_item_template = loader.get_template('ajax_changelist/'
                                                  'field_form.html')
2,
5,
2,
1,
1365111694
] |
def _get_field_handler(self, fieldname):
    """ Handle rendering of AJAX-editable fields for the changelist, by
    dynamically building a callable for each field.
    """
    def handler_function(obj, *args, **kwargs):
        # Build a single-field ModelForm for this row; the "c<id>" prefix
        # keeps input names unique across rows on the same page.
        ItemForm = modelform_factory(self.model, fields=(fieldname,))
        form = ItemForm(instance=obj, prefix="c" + unicode(obj.id))
        field_value = get_printable_field_value(obj, fieldname)
        # Render the field value and edit form
        return self.ajax_item_template.render(Context({
            'object_id': obj.id,
            'field_name': fieldname,
            'form': form.as_p(),
            'field_value': field_value
        }))
    # allow_tags stops the changelist escaping the rendered HTML
    # (pre-Django-2.0 API); short_description is the column header.
    handler_function.allow_tags = True
    handler_function.short_description = fieldname
    return handler_function
2,
5,
2,
1,
1365111694
] |
def getStep(self, stepid=None, buildid=None, number=None, name=None):
    """Look up a single step row.

    Supply either ``stepid``, or ``buildid`` plus one of
    ``number``/``name``; anything else raises RuntimeError. Returns the
    step dict, or None when no row matches.
    NOTE(review): the trailing ``return (yield ...)`` implies an
    @defer.inlineCallbacks decorator outside this view -- confirm.
    """
    tbl = self.db.model.steps
    if stepid is not None:
        wc = (tbl.c.id == stepid)
    else:
        if buildid is None:
            raise RuntimeError('must supply either stepid or buildid')
        if number is not None:
            wc = (tbl.c.number == number)
        elif name is not None:
            wc = (tbl.c.name == name)
        else:
            raise RuntimeError('must supply either number or name')
        # number/name are only meaningful within one build.
        wc = wc & (tbl.c.buildid == buildid)

    def thd(conn):
        # Runs on a database thread.
        q = self.db.model.steps.select(whereclause=wc)
        res = conn.execute(q)
        row = res.fetchone()
        rv = None
        if row:
            rv = self._stepdictFromRow(row)
        res.close()
        return rv
    return (yield self.db.pool.do(thd))
6,
2,
6,
1,
1285148030
] |
def getSteps(self, buildid):
    """Return the step dicts for *buildid*, ordered by step number."""
    def thd(conn):
        # Runs on a database thread.
        steps_tbl = self.db.model.steps
        query = (steps_tbl.select()
                 .where(steps_tbl.c.buildid == buildid)
                 .order_by(steps_tbl.c.number))
        rows = conn.execute(query).fetchall()
        return [self._stepdictFromRow(row) for row in rows]
    return self.db.pool.do(thd)
6,
2,
6,
1,
1285148030
] |
def addStep(self, buildid, name, state_string):
    """Insert a new step row for build *buildid*.

    Returns a (stepid, number, name) tuple; the number is one past the
    build's current maximum, and on a name collision the name gets a
    ``_N`` suffix (truncated to fit 50 characters).
    """
    def thd(conn):
        # Runs on a database thread.
        tbl = self.db.model.steps
        # get the highest current number
        r = conn.execute(sa.select([sa.func.max(tbl.c.number)],
                whereclause=(tbl.c.buildid == buildid)))
        number = r.scalar()
        number = 0 if number is None else number + 1
        # note that there is no chance for a race condition here,
        # since only one master is inserting steps. If there is a
        # conflict, then the name is likely already taken.
        insert_row = dict(buildid=buildid, number=number,
                          started_at=None, complete_at=None,
                          state_string=state_string,
                          urls_json='[]', name=name)
        try:
            r = conn.execute(self.db.model.steps.insert(), insert_row)
            got_id = r.inserted_primary_key[0]
        except (sa.exc.IntegrityError, sa.exc.ProgrammingError):
            # Unique-constraint failure: fall through to uniquify below.
            got_id = None
        if got_id:
            return (got_id, number, name)
        # we didn't get an id, so calculate a unique name and use that
        # instead. Because names are truncated at the right to fit in a
        # 50-character identifier, this isn't a simple query.
        res = conn.execute(sa.select([tbl.c.name],
                whereclause=((tbl.c.buildid == buildid))))
        names = {row[0] for row in res}
        num = 1
        while True:
            numstr = '_%d' % num
            newname = name[:50 - len(numstr)] + numstr
            if newname not in names:
                break
            num += 1
        insert_row['name'] = newname
        r = conn.execute(self.db.model.steps.insert(), insert_row)
        got_id = r.inserted_primary_key[0]
        return (got_id, number, newname)
    return self.db.pool.do(thd)
6,
2,
6,
1,
1285148030
] |
def startStep(self, stepid):
    """Record the current reactor time as the step's started_at.

    NOTE(review): the bare ``yield`` at the end implies an
    @defer.inlineCallbacks decorator outside this view -- confirm.
    """
    # Capture the timestamp before hopping to the db thread.
    started_at = int(self.master.reactor.seconds())

    def thd(conn):
        tbl = self.db.model.steps
        q = tbl.update(whereclause=(tbl.c.id == stepid))
        conn.execute(q, started_at=started_at)
    yield self.db.pool.do(thd)
6,
2,
6,
1,
1285148030
] |
def setStepStateString(self, stepid, state_string):
    """Replace the state string of the step with id *stepid*."""
    def thd(conn):
        # Runs on a database thread.
        steps_tbl = self.db.model.steps
        update_q = steps_tbl.update(whereclause=(steps_tbl.c.id == stepid))
        conn.execute(update_q, state_string=state_string)
    return self.db.pool.do(thd)
6,
2,
6,
1,
1285148030
] |
def thd(conn):
    # Runs on a database thread: append {name, url} to the step's JSON
    # url list, skipping duplicates.
    # NOTE(review): relies on closure variables (stepid, name, url,
    # _racehook) from the enclosing method, which is outside this view.
    tbl = self.db.model.steps
    wc = (tbl.c.id == stepid)
    q = sa.select([tbl.c.urls_json],
                  whereclause=wc)
    res = conn.execute(q)
    row = res.fetchone()
    if _racehook is not None:
        # Test hook: lets tests interleave a concurrent update here.
        _racehook()
    urls = json.loads(row.urls_json)
    url_item = dict(name=name, url=url)
    if url_item not in urls:
        urls.append(url_item)
    q = tbl.update(whereclause=wc)
    conn.execute(q, urls_json=json.dumps(urls))
6,
2,
6,
1,
1285148030
] |
def finishStep(self, stepid, results, hidden):
    """Mark step *stepid* complete, recording results and hidden flag."""
    def thd(conn):
        # Runs on a database thread.
        steps_tbl = self.db.model.steps
        update_q = steps_tbl.update(whereclause=(steps_tbl.c.id == stepid))
        finished_at = int(self.master.reactor.seconds())
        conn.execute(update_q,
                     complete_at=finished_at,
                     results=results,
                     hidden=1 if hidden else 0)
    return self.db.pool.do(thd)
6,
2,
6,
1,
1285148030
] |
def setUp(self):
    # Load the bundled NASA GV 2DVD sample file before each test.
    filename = "testdata/nasa_gv_mc3e_2dvd_test.txt"
    self.dsd = NASA_2DVD_reader.read_2dvd_dsd_nasa_gv(filename)
35,
29,
35,
14,
1396333080
] |
def test_dsd_nd_exists(self):
    # The reader must always populate the drop-size-distribution field.
    self.assertIsNotNone(self.dsd.fields["Nd"], "DSD Object has no Nd field")
35,
29,
35,
14,
1396333080
] |
def test_RR_works(self):
    # calculate_RR() must add a rain_rate field with one entry per time
    # sample; the fixture file contains 5 samples.
    self.dsd.calculate_RR()
    self.assertIsNotNone(
        self.dsd.fields["rain_rate"],
        "Rain Rate is not in fields after calculate_RR()",
    )
    self.assertEqual(
        len(self.dsd.fields["rain_rate"]["data"]),
        5,
        "Wrong number of time samples in rain rate",
    )
35,
29,
35,
14,
1396333080
] |
def is_continuous(self):
    """
    Whether or not the domain has an uncountable number of values.

    :type: `bool`
    """
    # Abstract: concrete Domain subclasses must override.
    pass
91,
32,
91,
22,
1344992565
] |
def is_finite(self):
    """
    Whether or not the domain contains a finite number of points.

    :type: `bool`
    """
    # Abstract: concrete Domain subclasses must override.
    pass
91,
32,
91,
22,
1344992565
] |
def dtype(self):
    """
    The numpy dtype of a single element of the domain.

    :type: `np.dtype`
    """
    # Abstract: concrete Domain subclasses must override.
    pass
91,
32,
91,
22,
1344992565
] |
def n_members(self):
    """
    Returns the number of members in the domain if it
    `is_finite`, otherwise, returns `np.inf`.

    :type: ``int`` or ``np.inf``
    """
    # Abstract: concrete Domain subclasses must override.
    pass
91,
32,
91,
22,
1344992565
] |
def example_point(self):
    """
    Returns any single point guaranteed to be in the domain, but
    no other guarantees; useful for testing purposes.

    This is given as a size 1 ``np.array`` of type `dtype`.

    :type: ``np.ndarray``
    """
    # Abstract: concrete Domain subclasses must override.
    pass
91,
32,
91,
22,
1344992565
] |
def values(self):
    """
    Returns an `np.array` of type `dtype` containing
    some values from the domain.

    For domains where `is_finite` is ``True``, all elements
    of the domain will be yielded exactly once.

    :rtype: `np.ndarray`
    """
    # Abstract: concrete Domain subclasses must override.
    pass
91,
32,
91,
22,
1344992565
] |
def is_discrete(self):
    """
    Whether or not the domain has a countable number of values.

    :type: `bool`
    """
    # Defined as the complement of is_continuous, so subclasses only
    # need to implement one of the pair.
    return not self.is_continuous
91,
32,
91,
22,
1344992565
] |
def in_domain(self, points):
    """
    Returns ``True`` if all of the given points are in the domain,
    ``False`` otherwise.

    :param np.ndarray points: An `np.ndarray` of type `self.dtype`.
    :rtype: `bool`
    """
    # Abstract: concrete Domain subclasses must override.
    pass
91,
32,
91,
22,
1344992565
] |
def __init__(self, *domains): | QInfer/python-qinfer | [
91,
32,
91,
22,
1344992565
] |
def is_continuous(self):
    """
    Whether or not the domain has an uncountable number of values.

    :type: `bool`
    """
    # A product domain is continuous if any factor is; the generator
    # expression lets any() short-circuit without building a list.
    return any(domain.is_continuous for domain in self._domains)
91,
32,
91,
22,
1344992565
] |
def is_finite(self):
    """
    Whether or not the domain contains a finite number of points.

    :type: `bool`
    """
    # Finite only if every factor is; the generator expression lets
    # all() short-circuit without building a list.
    return all(domain.is_finite for domain in self._domains)
91,
32,
91,
22,
1344992565
] |
def dtype(self):
    """
    The numpy dtype of a single element of the domain.

    :type: `np.dtype`
    """
    # Combined struct dtype of the factor domains; presumably built in
    # __init__ (not visible here) -- confirm.
    return self._dtype
91,
32,
91,
22,
1344992565
] |
def n_members(self):
    """
    Returns the number of members in the domain if it
    `is_finite`, otherwise, returns `np.inf`.

    :type: ``int`` or ``np.inf``
    """
    if self.is_finite:
        # Cartesian product: the count is the product of the factor
        # domains' counts (1 for an empty product).
        return reduce(mul, [domain.n_members for domain in self._domains], 1)
    else:
        return np.inf
91,
32,
91,
22,
1344992565
] |
def example_point(self):
    """
    Returns any single point guaranteed to be in the domain, but
    no other guarantees; useful for testing purposes.

    This is given as a size 1 ``np.array`` of type `dtype`.

    :type: ``np.ndarray``
    """
    # Cached value; presumably assembled from the factor domains in
    # __init__ (not visible here) -- confirm.
    return self._example_point
91,
32,
91,
22,
1344992565
] |
def values(self):
    """
    Returns an `np.array` of type `dtype` containing
    some values from the domain.

    For domains where `is_finite` is ``True``, all elements
    of the domain will be yielded exactly once.

    :rtype: `np.ndarray`
    """
    # Cartesian product of the factor domains' values; each combination
    # is packed into a single record of the combined struct dtype.
    separate_values = [domain.values for domain in self._domains]
    return np.concatenate([
        join_struct_arrays(list(map(np.array, value)))
        for value in product(*separate_values)
    ])
91,
32,
91,
22,
1344992565
] |
def _mytype(self, array):
    """Reinterpret *array* as this domain's combined struct dtype."""
    # astype does weird stuff with struct names, and possibly
    # depends on numpy version; hopefully
    # the following is a bit more predictable since it passes through
    # uint8
    return separate_struct_array(array, self.dtype)[0]
91,
32,
91,
22,
1344992565
] |
def to_regular_arrays(self, array):
    """
    Expands from an array of type `self.dtype` into a list of
    arrays with dtypes corresponding to the factor domains.

    :param np.ndarray array: An `np.array` of type `self.dtype`.
    :rtype: ``list``
    """
    # Normalize struct field names first, then split per factor dtype.
    return separate_struct_array(self._mytype(array), self._dtypes)
91,
32,
91,
22,
1344992565
] |
def in_domain(self, points):
    """
    Returns ``True`` if all of the given points are in the domain,
    ``False`` otherwise.

    :param np.ndarray points: An `np.ndarray` of type `self.dtype`.
    :rtype: `bool`
    """
    # Each factor domain checks its own slice of the struct array; the
    # generator expression lets all() short-circuit on the first factor
    # that rejects its slice.
    return all(
        domain.in_domain(array)
        for domain, array in
        zip(self._domains, separate_struct_array(points, self._dtypes))
    )
91,
32,
91,
22,
1344992565
] |
def __init__(self, min=-np.inf, max=np.inf):
    # NOTE(review): parameter names shadow the builtins min/max; kept
    # as-is for backward compatibility with keyword callers.
    self._min = min
    self._max = max
91,
32,
91,
22,
1344992565
] |
def min(self):
    """
    Returns the minimum value of the domain.

    :rtype: `float`
    """
    return self._min
91,
32,
91,
22,
1344992565
] |
def max(self):
    """
    Returns the maximum value of the domain.

    :rtype: `float`
    """
    return self._max
91,
32,
91,
22,
1344992565
] |
def is_continuous(self):
    """
    Whether or not the domain has an uncountable number of values.

    :type: `bool`
    """
    # A real interval is always uncountable.
    return True
91,
32,
91,
22,
1344992565
] |
def is_finite(self):
    """
    Whether or not the domain contains a finite number of points.

    :type: `bool`
    """
    # A real interval never has finitely many points.
    return False
91,
32,
91,
22,
1344992565
] |
def dtype(self):
    """
    The numpy dtype of a single element of the domain.

    :type: `np.dtype`
    """
    # ``np.float`` was a deprecated alias for the builtin ``float`` and
    # was removed in NumPy 1.24; return the builtin directly, which
    # np.dtype() interprets identically.
    return float
91,
32,
91,
22,
1344992565
] |
def n_members(self):
    """
    Returns the number of members in the domain if it
    `is_finite`, otherwise, returns ``np.inf``.

    :type: ``np.inf``
    """
    # A continuous interval is uncountable, so always report infinity.
    return np.inf
91,
32,
91,
22,
1344992565
] |
def example_point(self):
    """
    Returns any single point guaranteed to be in the domain, but
    no other guarantees; useful for testing purposes.

    This is given as a size 1 ``np.array`` of type ``dtype``.

    :type: ``np.ndarray``
    """
    # Prefer a finite endpoint; a fully unbounded interval always
    # contains 0, so fall back to that.
    if not np.isinf(self.min):
        return np.array([self.min], dtype=self.dtype)
    if not np.isinf(self.max):
        return np.array([self.max], dtype=self.dtype)
    else:
        return np.array([0], dtype=self.dtype)
91,
32,
91,
22,
1344992565
] |
def values(self):
    """
    Returns an `np.array` of type `self.dtype` containing
    some values from the domain.

    For domains where ``is_finite`` is ``True``, all elements
    of the domain will be yielded exactly once.

    :rtype: `np.ndarray`
    """
    # A real interval cannot be enumerated; return one representative.
    return self.example_point
91,
32,
91,
22,
1344992565
] |
def in_domain(self, points):
    """
    Returns ``True`` if all of the given points are in the domain,
    ``False`` otherwise.

    :param np.ndarray points: An `np.ndarray` of type `self.dtype`.
    :rtype: `bool`
    """
    # Complex values can never be members of a real interval.
    if not np.all(np.isreal(points)):
        return False
    above_min = np.all(np.greater_equal(points, self._min))
    below_max = np.all(np.less_equal(points, self._max))
    return above_min and below_max
91,
32,
91,
22,
1344992565
] |
def __init__(self, min=0, max=np.inf):
    # Normalize finite endpoints to int; infinities pass through
    # unchanged. NOTE(review): parameter names shadow builtins min/max,
    # kept for backward compatibility with keyword callers.
    self._min = int(min) if not np.isinf(min) else min
    self._max = int(max) if not np.isinf(max) else max
91,
32,
91,
22,
1344992565
] |
def min(self):
    """
    Returns the minimum value of the domain.

    :rtype: `float` or `np.inf`
    """
    # __init__ already normalized _min, so this conversion is a
    # defensive no-op.
    return int(self._min) if not np.isinf(self._min) else self._min
91,
32,
91,
22,
1344992565
] |
def max(self):
    """
    Returns the maximum value of the domain.

    :rtype: `float` or `np.inf`
    """
    # __init__ already normalized _max, so this conversion is a
    # defensive no-op.
    return int(self._max) if not np.isinf(self._max) else self._max
91,
32,
91,
22,
1344992565
] |
def is_continuous(self):
    """
    Whether or not the domain has an uncountable number of values.

    :type: `bool`
    """
    # Integers are countable, so never continuous.
    return False
91,
32,
91,
22,
1344992565
] |
def is_finite(self):
    """
    Whether or not the domain contains a finite number of points.

    :type: `bool`
    """
    # Finite iff neither endpoint is infinite (De Morgan of the
    # original conjunction).
    return not (np.isinf(self.min) or np.isinf(self.max))
91,
32,
91,
22,
1344992565
] |
def dtype(self):
    """
    The numpy dtype of a single element of the domain.

    :type: `np.dtype`
    """
    # ``np.int`` was a deprecated alias for the builtin ``int`` and was
    # removed in NumPy 1.24; return the builtin directly, which
    # np.dtype() interprets identically.
    return int
91,
32,
91,
22,
1344992565
] |
def n_members(self):
    """
    Returns the number of members in the domain if it
    `is_finite`, otherwise, returns `np.inf`.

    :type: ``int`` or ``np.inf``
    """
    # Guard clause for the unbounded case; otherwise count inclusively.
    if not self.is_finite:
        return np.inf
    return int(self.max - self.min + 1)
91,
32,
91,
22,
1344992565
] |
def example_point(self):
    """
    Returns any single point guaranteed to be in the domain, but
    no other guarantees; useful for testing purposes.

    This is given as a size 1 ``np.array`` of type ``dtype``.

    :type: ``np.ndarray``
    """
    # Prefer a finite endpoint; a fully unbounded integer domain always
    # contains 0, so fall back to that.
    if not np.isinf(self.min):
        return np.array([self._min], dtype=self.dtype)
    if not np.isinf(self.max):
        return np.array([self._max], dtype=self.dtype)
    else:
        return np.array([0], dtype=self.dtype)
91,
32,
91,
22,
1344992565
] |
def values(self):
    """
    Returns an `np.array` of type `self.dtype` containing
    some values from the domain.

    For domains where ``is_finite`` is ``True``, all elements
    of the domain will be yielded exactly once.

    :rtype: `np.ndarray`
    """
    # Unbounded domains cannot be enumerated; return one representative.
    if not self.is_finite:
        return self.example_point
    # Inclusive integer range [min, max].
    return np.arange(self.min, self.max + 1, dtype=self.dtype)
91,
32,
91,
22,
1344992565
] |
def in_domain(self, points):
    """
    Returns ``True`` if all of the given points are in the domain,
    ``False`` otherwise.

    :param np.ndarray points: An `np.ndarray` of type `self.dtype`.
    :rtype: `bool`
    """
    # Complex values can never be members of an integer interval.
    if not np.all(np.isreal(points)):
        return False
    try:
        all_integer = np.all(np.mod(points, 1) == 0)
    except TypeError:
        # Values without a modulo operation cannot be integers.
        all_integer = False
    above_min = np.all(np.greater_equal(points, self._min))
    below_max = np.all(np.less_equal(points, self._max))
    return all_integer and above_min and below_max
91,
32,
91,
22,
1344992565
] |
def __init__(self, n_meas, n_elements=2):
    # n_elements: number of categories per outcome; n_meas: number of
    # measurements distributed among them.
    self._n_elements = n_elements
    self._n_meas = n_meas
91,
32,
91,
22,
1344992565
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.