hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
51aba9e17aae39b7e486fad21b079c50946d279e
| 180
|
py
|
Python
|
csv2kmz/__init__.py
|
aguinane/csvtokmz
|
3fb04797d9ec57db718e9ea595c6594ed061f04f
|
[
"MIT"
] | null | null | null |
csv2kmz/__init__.py
|
aguinane/csvtokmz
|
3fb04797d9ec57db718e9ea595c6594ed061f04f
|
[
"MIT"
] | null | null | null |
csv2kmz/__init__.py
|
aguinane/csvtokmz
|
3fb04797d9ec57db718e9ea595c6594ed061f04f
|
[
"MIT"
] | null | null | null |
"""
csv2kmz
~~~~~
Converts a parsed csv file to a kmz Google Earth overlay.
"""
from csv2kmz.buildkmz import create_kmz_from_csv
from csv2kmz.cli import cli
| 18
| 62
| 0.666667
| 25
| 180
| 4.68
| 0.64
| 0.188034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022388
| 0.255556
| 180
| 9
| 63
| 20
| 0.850746
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
51ad3c369f73a917b200c3987886b01b3dcc2851
| 133
|
py
|
Python
|
mre/__init__.py
|
Bohlski/mre
|
4d8e2781ac1415989bdd799d1cffb1ff77662977
|
[
"MIT"
] | null | null | null |
mre/__init__.py
|
Bohlski/mre
|
4d8e2781ac1415989bdd799d1cffb1ff77662977
|
[
"MIT"
] | null | null | null |
mre/__init__.py
|
Bohlski/mre
|
4d8e2781ac1415989bdd799d1cffb1ff77662977
|
[
"MIT"
] | null | null | null |
from .Regex import Regex
from .Quantifier import Quantifier
from .Set import Set
from .Group import Group
from .Anchor import Anchor
| 22.166667
| 34
| 0.81203
| 20
| 133
| 5.4
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.150376
| 133
| 5
| 35
| 26.6
| 0.955752
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
51fb3f8b228e89f4aad2b4fe3d1af31dcd0da89d
| 28,055
|
py
|
Python
|
plogical/dnsUtilities.py
|
uzairAK/serverom-panel
|
3dcde05ad618e6bef280db7d3180f926fe2ab1db
|
[
"MIT"
] | null | null | null |
plogical/dnsUtilities.py
|
uzairAK/serverom-panel
|
3dcde05ad618e6bef280db7d3180f926fe2ab1db
|
[
"MIT"
] | null | null | null |
plogical/dnsUtilities.py
|
uzairAK/serverom-panel
|
3dcde05ad618e6bef280db7d3180f926fe2ab1db
|
[
"MIT"
] | null | null | null |
#!/usr/local/CyberCP/bin/python
import os, sys
sys.path.append('/usr/local/CyberCP')
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CyberCP.settings")
try:
django.setup()
except:
pass
from plogical import CyberCPLogFileWriter as logging
import subprocess
import shlex
try:
from dns.models import Domains, Records
from manageServices.models import PDNSStatus, SlaveServers
except:
pass
import CloudFlare
from plogical.processUtilities import ProcessUtilities
class DNS:
nsd_base = "/etc/nsd/nsd.conf"
zones_base_dir = "/usr/local/lsws/conf/zones/"
create_zone_dir = "/usr/local/lsws/conf/zones"
defaultNameServersPath = '/home/cyberpanel/defaultNameservers'
CFPath = '/home/cyberpanel/CloudFlare'
## DNS Functions
def loadCFKeys(self):
cfFile = '%s%s' % (DNS.CFPath, self.admin.userName)
if os.path.exists(cfFile):
data = open(cfFile, 'r').readlines()
self.email = data[0].rstrip('\n')
self.key = data[1].rstrip('\n')
self.status = data[2].rstrip('\n')
return 1
else:
logging.CyberCPLogFileWriter.writeToFile('User %s does not have CoudFlare configured.' % (self.admin.userName))
return 0
def cfTemplate(self, zoneDomain, admin, enableCheck=None):
try:
self.admin = admin
## Get zone
if self.loadCFKeys():
if enableCheck == None:
pass
else:
if self.status == 'Enable':
pass
else:
return 0, 'Sync not enabled.'
cf = CloudFlare.CloudFlare(email=self.email, token=self.key)
try:
params = {'name': zoneDomain, 'per_page': 50}
zones = cf.zones.get(params=params)
for zone in sorted(zones, key=lambda v: v['name']):
zone = zone['id']
domain = Domains.objects.get(name=zoneDomain)
records = Records.objects.filter(domain_id=domain.id)
for record in records:
DNS.createDNSRecordCloudFlare(cf, zone, record.name, record.type, record.content, record.prio,
record.ttl)
return 1, None
except CloudFlare.exceptions.CloudFlareAPIError as e:
logging.CyberCPLogFileWriter.writeToFile(str(e))
except Exception as e:
logging.CyberCPLogFileWriter.writeToFile(str(e))
try:
zone_info = cf.zones.post(data={'jump_start': False, 'name': zoneDomain})
zone = zone_info['id']
domain = Domains.objects.get(name=zoneDomain)
records = Records.objects.filter(domain_id=domain.id)
for record in records:
DNS.createDNSRecordCloudFlare(cf, zone, record.name, record.type, record.content, record.prio,
record.ttl)
return 1, None
except CloudFlare.exceptions.CloudFlareAPIError as e:
return 0, str(e)
except Exception as e:
return 0, str(e)
except BaseException as msg:
return 0, str(e)
@staticmethod
def dnsTemplate(domain, admin):
try:
ipFile = "/etc/cyberpanel/machineIP"
f = open(ipFile)
ipData = f.read()
ipAddress = ipData.split('\n', 1)[0]
import tldextract
extractDomain = tldextract.extract(domain)
topLevelDomain = extractDomain.domain + '.' + extractDomain.suffix
subDomain = extractDomain.subdomain
if len(subDomain) == 0:
if Domains.objects.filter(name=topLevelDomain).count() == 0:
try:
pdns = PDNSStatus.objects.get(pk=1)
if pdns.type == 'MASTER':
zone = Domains(admin=admin, name=topLevelDomain, type="MASTER")
zone.save()
for items in SlaveServers.objects.all():
record = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="NS",
content=items.slaveServer,
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
else:
zone = Domains(admin=admin, name=topLevelDomain, type="NATIVE")
except:
zone = Domains(admin=admin, name=topLevelDomain, type="NATIVE")
zone.save()
if zone.type == 'NATIVE':
if os.path.exists(DNS.defaultNameServersPath):
defaultNS = open(DNS.defaultNameServersPath, 'r').readlines()
for items in defaultNS:
if len(items) > 5:
record = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="NS",
content=items.rstrip('\n'),
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
else:
record = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="NS",
content='ns1.%s' % (topLevelDomain),
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
record = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="NS",
content='ns2.%s' % (topLevelDomain),
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
content = "ns1." + topLevelDomain + " hostmaster." + topLevelDomain + " 1 10800 3600 604800 3600"
soaRecord = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="SOA",
content=content,
ttl=3600,
prio=0,
disabled=0,
auth=1)
soaRecord.save()
## Main A record.
record = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="A",
content=ipAddress,
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
# CNAME Records.
cNameValue = "www." + topLevelDomain
record = Records(domainOwner=zone,
domain_id=zone.id,
name=cNameValue,
type="CNAME",
content=topLevelDomain,
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
cNameValue = "ftp." + topLevelDomain
record = Records(domainOwner=zone,
domain_id=zone.id,
name=cNameValue,
type="CNAME",
content=topLevelDomain,
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
## MX Record.
mxValue = "mail." + topLevelDomain
record = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="MX",
content=mxValue,
ttl=3600,
prio="10",
disabled=0,
auth=1)
record.save()
record = Records(domainOwner=zone,
domain_id=zone.id,
name=mxValue,
type="A",
content=ipAddress,
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
## TXT Records for mail
record = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="TXT",
content="v=spf1 a mx ip4:" + ipAddress + " ~all",
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
record = Records(domainOwner=zone,
domain_id=zone.id,
name="_dmarc." + topLevelDomain,
type="TXT",
content="v=DMARC1; p=none",
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
record = Records(domainOwner=zone,
domain_id=zone.id,
name="_domainkey." + topLevelDomain,
type="TXT",
content="t=y; o=~;",
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
else:
if Domains.objects.filter(name=topLevelDomain).count() == 0:
try:
pdns = PDNSStatus.objects.get(pk=1)
if pdns.type == 'MASTER':
zone = Domains(admin=admin, name=topLevelDomain, type="MASTER")
else:
zone = Domains(admin=admin, name=topLevelDomain, type="NATIVE")
except:
zone = Domains(admin=admin, name=topLevelDomain, type="NATIVE")
zone.save()
content = "ns1." + topLevelDomain + " hostmaster." + topLevelDomain + " 1 10800 3600 604800 3600"
soaRecord = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="SOA",
content=content,
ttl=3600,
prio=0,
disabled=0,
auth=1)
soaRecord.save()
## Main A record.
record = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="A",
content=ipAddress,
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
# CNAME Records.
cNameValue = "www." + topLevelDomain
record = Records(domainOwner=zone,
domain_id=zone.id,
name=cNameValue,
type="CNAME",
content=topLevelDomain,
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
cNameValue = "ftp." + topLevelDomain
record = Records(domainOwner=zone,
domain_id=zone.id,
name=cNameValue,
type="CNAME",
content=topLevelDomain,
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
## MX Record.
mxValue = "mail." + topLevelDomain
record = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="MX",
content=mxValue,
ttl=3600,
prio="10",
disabled=0,
auth=1)
record.save()
record = Records(domainOwner=zone,
domain_id=zone.id,
name=mxValue,
type="A",
content=ipAddress,
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
## TXT Records for mail
record = Records(domainOwner=zone,
domain_id=zone.id,
name=topLevelDomain,
type="TXT",
content="v=spf1 a mx ip4:" + ipAddress + " ~all",
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
record = Records(domainOwner=zone,
domain_id=zone.id,
name="_dmarc." + topLevelDomain,
type="TXT",
content="v=DMARC1; p=none",
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
record = Records(domainOwner=zone,
domain_id=zone.id,
name="_domainkey." + topLevelDomain,
type="TXT",
content="t=y; o=~;",
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
## Creating sub-domain level record.
zone = Domains.objects.get(name=topLevelDomain)
actualSubDomain = subDomain + "." + topLevelDomain
## Main A record.
DNS.createDNSRecord(zone, actualSubDomain, "A", ipAddress, 0, 3600)
# CNAME Records.
cNameValue = "www." + actualSubDomain
DNS.createDNSRecord(zone, cNameValue, "CNAME", actualSubDomain, 0, 3600)
if ProcessUtilities.decideDistro() == ProcessUtilities.ubuntu:
command = 'sudo systemctl restart pdns'
ProcessUtilities.executioner(command)
dns = DNS()
dns.cfTemplate(domain, admin)
except BaseException as msg:
logging.CyberCPLogFileWriter.writeToFile(
"We had errors while creating DNS records for: " + domain + ". Error message: " + str(msg))
@staticmethod
def createDKIMRecords(domain):
try:
import tldextract
extractDomain = tldextract.extract(domain)
topLevelDomain = extractDomain.domain + '.' + extractDomain.suffix
zone = Domains.objects.get(name=topLevelDomain)
path = "/etc/opendkim/keys/" + topLevelDomain + "/default.txt"
command = "cat " + path
output = subprocess.check_output(shlex.split(command)).decode("utf-8")
leftIndex = output.index('(') + 2
rightIndex = output.rindex(')') - 1
record = Records(domainOwner=zone,
domain_id=zone.id,
name="default._domainkey." + topLevelDomain,
type="TXT",
content=output[leftIndex:rightIndex],
ttl=3600,
prio=0,
disabled=0,
auth=1)
record.save()
if ProcessUtilities.decideDistro() == ProcessUtilities.ubuntu:
command = ' systemctl restart pdns'
ProcessUtilities.executioner(command)
## Add record to CF If sync enabled
dns = DNS()
dns.admin = zone.admin
if dns.loadCFKeys():
cf = CloudFlare.CloudFlare(email=dns.email, token=dns.key)
if dns.status == 'Enable':
try:
params = {'name': domain, 'per_page': 50}
zones = cf.zones.get(params=params)
for zone in sorted(zones, key=lambda v: v['name']):
zone = zone['id']
DNS.createDNSRecordCloudFlare(cf, zone, "default._domainkey." + topLevelDomain, 'TXT',
output[leftIndex:rightIndex], 0,
3600)
except CloudFlare.exceptions.CloudFlareAPIError as e:
logging.CyberCPLogFileWriter.writeToFile(str(e))
except Exception as e:
logging.CyberCPLogFileWriter.writeToFile(str(e))
except BaseException as msg:
logging.CyberCPLogFileWriter.writeToFile(
"We had errors while creating DKIM record for: " + domain + ". Error message: " + str(msg))
@staticmethod
def getZoneObject(virtualHostName):
try:
return Domains.objects.get(name=virtualHostName)
except:
return 0
@staticmethod
def createDNSRecordCloudFlare(cf, zone, name, type, value, priority, ttl):
try:
if value.find('DKIM') > -1:
value = value.replace('\n\t', '')
value = value.replace('"', '')
if ttl > 0:
dns_record = {'name': name, 'type': type, 'content': value, 'ttl': ttl, 'priority': priority}
else:
dns_record = {'name': name, 'type': type, 'content': value, 'priority': priority}
cf.zones.dns_records.post(zone, data=dns_record)
except BaseException as msg:
logging.CyberCPLogFileWriter.writeToFile(str(msg) + '. [createDNSRecordCloudFlare]')
@staticmethod
def createDNSRecord(zone, name, type, value, priority, ttl):
try:
if zone.type == 'MASTER':
getSOA = Records.objects.get(domainOwner=zone, type='SOA')
soaContent = getSOA.content.split(' ')
soaContent[2] = str(int(soaContent[2]) + 1)
getSOA.content = " ".join(soaContent)
getSOA.save()
if type == 'NS':
if Records.objects.filter(name=name, type=type, content=value).count() == 0:
record = Records(domainOwner=zone,
domain_id=zone.id,
name=name,
type=type,
content=value,
ttl=ttl,
prio=priority,
disabled=0,
auth=1)
record.save()
if ProcessUtilities.decideDistro() == ProcessUtilities.ubuntu:
command = 'sudo systemctl restart pdns'
ProcessUtilities.executioner(command)
return
if type == 'TXT':
if Records.objects.filter(name=name, type=type, content=value).count() == 0:
record = Records(domainOwner=zone,
domain_id=zone.id,
name=name,
type=type,
content=value,
ttl=ttl,
prio=priority,
disabled=0,
auth=1)
record.save()
if ProcessUtilities.decideDistro() == ProcessUtilities.ubuntu:
command = 'sudo systemctl restart pdns'
ProcessUtilities.executioner(command)
return
if type == 'MX':
record = Records(domainOwner=zone,
domain_id=zone.id,
name=name,
type=type,
content=value,
ttl=ttl,
prio=priority,
disabled=0,
auth=1)
record.save()
if ProcessUtilities.decideDistro() == ProcessUtilities.ubuntu:
command = 'sudo systemctl restart pdns'
ProcessUtilities.executioner(command)
return
if Records.objects.filter(name=name, type=type).count() == 0:
record = Records(domainOwner=zone,
domain_id=zone.id,
name=name,
type=type,
content=value,
ttl=ttl,
prio=priority,
disabled=0,
auth=1)
record.save()
if ProcessUtilities.decideDistro() == ProcessUtilities.ubuntu:
command = 'sudo systemctl restart pdns'
ProcessUtilities.executioner(command)
## Add Record to CF if SYNC Enabled
dns = DNS()
dns.admin = zone.admin
dns.loadCFKeys()
cf = CloudFlare.CloudFlare(email=dns.email, token=dns.key)
if dns.status == 'Enable':
try:
params = {'name': zone.name, 'per_page': 50}
zones = cf.zones.get(params=params)
for zone in sorted(zones, key=lambda v: v['name']):
zone = zone['id']
DNS.createDNSRecordCloudFlare(cf, zone, name, type, value, ttl, priority)
except CloudFlare.exceptions.CloudFlareAPIError as e:
logging.CyberCPLogFileWriter.writeToFile(str(e))
except Exception as e:
logging.CyberCPLogFileWriter.writeToFile(str(e))
except BaseException as msg:
logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [createDNSRecord]")
@staticmethod
def deleteDNSZone(virtualHostName):
try:
delZone = Domains.objects.get(name=virtualHostName)
delZone.delete()
except:
## There does not exist a zone for this domain.
pass
@staticmethod
def createDNSZone(virtualHostName, admin):
try:
zone = Domains(admin=admin, name=virtualHostName, type="NATIVE")
zone.save()
except:
## There does not exist a zone for this domain.
pass
@staticmethod
def getDNSRecords(virtualHostName):
try:
zone = Domains.objects.get(name=virtualHostName)
zone.save()
return zone.records_set.all()
except:
## There does not exist a zone for this domain.
pass
@staticmethod
def getDNSZones():
try:
return Domains.objects.all()
except:
pass
@staticmethod
def deleteDNSRecord(recordID):
try:
delRecord = Records.objects.get(id=recordID)
delRecord.delete()
except:
## There does not exist a zone for this domain.
pass
| 40.425072
| 123
| 0.381322
| 1,939
| 28,055
| 5.489428
| 0.123775
| 0.0233
| 0.055806
| 0.071026
| 0.737317
| 0.72097
| 0.702837
| 0.699361
| 0.674371
| 0.663097
| 0
| 0.021272
| 0.542541
| 28,055
| 693
| 124
| 40.483405
| 0.808088
| 0.017359
| 0
| 0.790826
| 0
| 0
| 0.045583
| 0.006865
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022018
| false
| 0.016514
| 0.020183
| 0
| 0.080734
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
cfc18e95cc36cf95c2b1ed6dcc859fe8492f6cbe
| 86
|
py
|
Python
|
test/input/051.py
|
EliRibble/pyfmt
|
e84a5531a7c06703eddd9dbc2072b0c8deae8c57
|
[
"MIT"
] | null | null | null |
test/input/051.py
|
EliRibble/pyfmt
|
e84a5531a7c06703eddd9dbc2072b0c8deae8c57
|
[
"MIT"
] | null | null | null |
test/input/051.py
|
EliRibble/pyfmt
|
e84a5531a7c06703eddd9dbc2072b0c8deae8c57
|
[
"MIT"
] | null | null | null |
import enum
THINGS = enum(A="apple", B="banana")
def getthings():
return THINGS
| 12.285714
| 36
| 0.662791
| 12
| 86
| 4.75
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186047
| 86
| 6
| 37
| 14.333333
| 0.814286
| 0
| 0
| 0
| 0
| 0
| 0.127907
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
cff1fde630141c16a99b4140461128485f1630bc
| 354
|
py
|
Python
|
tests/ast_parser/test_type_defs.py
|
FHPythonUtils/FHDoc
|
f027db02ef4cba6c0d7c2bbe105228aecfa8f15b
|
[
"MIT"
] | 1
|
2021-02-28T14:16:09.000Z
|
2021-02-28T14:16:09.000Z
|
tests/ast_parser/test_type_defs.py
|
FHPythonUtils/FHDoc
|
f027db02ef4cba6c0d7c2bbe105228aecfa8f15b
|
[
"MIT"
] | 1
|
2020-10-15T13:50:02.000Z
|
2020-10-15T13:50:52.000Z
|
tests/ast_parser/test_type_defs.py
|
FHPythonUtils/FHDoc
|
f027db02ef4cba6c0d7c2bbe105228aecfa8f15b
|
[
"MIT"
] | 1
|
2021-02-28T14:16:15.000Z
|
2021-02-28T14:16:15.000Z
|
# pylint: disable=missing-docstring
import unittest
from fhdoc.ast_parser import type_defs
class TestTypeDefs(unittest.TestCase):
def test_init(self):
self.assertTrue(type_defs.RenderExpr)
self.assertTrue(type_defs.Node)
self.assertTrue(type_defs.DirtyRenderExpr)
self.assertTrue(type_defs.ASTIterable)
self.assertTrue(type_defs.ASTImport)
| 25.285714
| 44
| 0.819209
| 46
| 354
| 6.130435
| 0.543478
| 0.170213
| 0.319149
| 0.390071
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090395
| 354
| 13
| 45
| 27.230769
| 0.875776
| 0.09322
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.555556
| 1
| 0.111111
| false
| 0
| 0.333333
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3211e974a5074a217be27ddef4faaeeb9e2100f1
| 93
|
py
|
Python
|
enthought/envisage/ui/action/group.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/envisage/ui/action/group.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/envisage/ui/action/group.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from envisage.ui.action.group import *
| 23.25
| 38
| 0.827957
| 13
| 93
| 5.538462
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11828
| 93
| 3
| 39
| 31
| 0.878049
| 0.129032
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5c5aa797a8136a7c4eae93ec7bc8b85501e64a69
| 387
|
py
|
Python
|
underworld/libUnderworld/libUnderworldPy/__init__.py
|
longgangfan/underworld2
|
5c8acc17fa4d97e86a62b13b8bfb2af6e81a8ee4
|
[
"CC-BY-4.0"
] | 116
|
2015-09-28T10:30:55.000Z
|
2022-03-22T04:12:38.000Z
|
underworld/libUnderworld/libUnderworldPy/__init__.py
|
longgangfan/underworld2
|
5c8acc17fa4d97e86a62b13b8bfb2af6e81a8ee4
|
[
"CC-BY-4.0"
] | 561
|
2015-09-29T06:05:50.000Z
|
2022-03-22T23:37:29.000Z
|
underworld/libUnderworld/libUnderworldPy/__init__.py
|
longgangfan/underworld2
|
5c8acc17fa4d97e86a62b13b8bfb2af6e81a8ee4
|
[
"CC-BY-4.0"
] | 68
|
2015-12-14T21:57:46.000Z
|
2021-08-25T04:54:26.000Z
|
import numpy
from . import StGermain
from . import StgDomain
from . import StgFEM
from . import Solvers
from . import PICellerator
from . import Underworld
from . import gLucifer
#import ImportersToolbox
from . import c_arrays
from . import c_pointers
from . import StGermain_Tools
from . import Function
from . import petsc
try:
from . import LavaVu
except ImportError:
pass
| 18.428571
| 29
| 0.775194
| 50
| 387
| 5.94
| 0.44
| 0.43771
| 0.127946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183463
| 387
| 20
| 30
| 19.35
| 0.939873
| 0.059432
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.058824
| 0.882353
| 0
| 0.882353
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
5c648e7108ed7fb0c7f371fa42fd6919addf885c
| 38
|
py
|
Python
|
env/lib/python3.8/site-packages/plotly/graph_objs/layout/template/data/_treemap.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 11,750
|
2015-10-12T07:03:39.000Z
|
2022-03-31T20:43:15.000Z
|
env/lib/python3.8/site-packages/plotly/graph_objs/layout/template/data/_treemap.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,951
|
2015-10-12T00:41:25.000Z
|
2022-03-31T22:19:26.000Z
|
env/lib/python3.8/site-packages/plotly/graph_objs/layout/template/data/_treemap.py
|
acrucetta/Chicago_COVI_WebApp
|
a37c9f492a20dcd625f8647067394617988de913
|
[
"MIT",
"Unlicense"
] | 2,623
|
2015-10-15T14:40:27.000Z
|
2022-03-28T16:05:50.000Z
|
from plotly.graph_objs import Treemap
| 19
| 37
| 0.868421
| 6
| 38
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 38
| 1
| 38
| 38
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
5c94c10e231179b850a41b706d4093a3ad5b0196
| 196
|
py
|
Python
|
tests/test_em.py
|
hoffmann-muki/honours_project
|
151ad51a971be09ee69ff56aaff27e14e5106f7c
|
[
"MIT"
] | null | null | null |
tests/test_em.py
|
hoffmann-muki/honours_project
|
151ad51a971be09ee69ff56aaff27e14e5106f7c
|
[
"MIT"
] | null | null | null |
tests/test_em.py
|
hoffmann-muki/honours_project
|
151ad51a971be09ee69ff56aaff27e14e5106f7c
|
[
"MIT"
] | null | null | null |
from core_gpfa.em import em
def test_em(seq, params, kernSDList, minVarFrac):
# Run inference
(est_params, seq, LL, iterTime) = em(seq, params, kernSDList, minVarFrac)
return LL
| 24.5
| 77
| 0.693878
| 27
| 196
| 4.925926
| 0.62963
| 0.075188
| 0.165414
| 0.315789
| 0.466165
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.209184
| 196
| 8
| 78
| 24.5
| 0.858065
| 0.066327
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
7a47479025baeaffcd4d7b7052e65ef21db175e0
| 60
|
py
|
Python
|
lib/plivo/constants/__init__.py
|
goztrk/django-htk
|
c56bf112e5d627780d2f4288460eae5cce80fa9e
|
[
"MIT"
] | 206
|
2015-10-15T07:05:08.000Z
|
2021-02-19T11:48:36.000Z
|
lib/plivo/constants/__init__.py
|
goztrk/django-htk
|
c56bf112e5d627780d2f4288460eae5cce80fa9e
|
[
"MIT"
] | 8
|
2017-10-16T10:18:31.000Z
|
2022-03-09T14:24:27.000Z
|
lib/plivo/constants/__init__.py
|
goztrk/django-htk
|
c56bf112e5d627780d2f4288460eae5cce80fa9e
|
[
"MIT"
] | 61
|
2015-10-15T08:12:44.000Z
|
2022-03-10T12:25:06.000Z
|
# HTK Imports
from htk.lib.plivo.constants.general import *
| 20
| 45
| 0.783333
| 9
| 60
| 5.222222
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116667
| 60
| 2
| 46
| 30
| 0.886792
| 0.183333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7a6b03be18a04ee807a5609a4d83e04fee18acee
| 157
|
py
|
Python
|
jupyterlab/jupyter_lab_config.py
|
rsundqvist/rics
|
c67ff6703facb3170535dcf173d7e55734cedbc6
|
[
"MIT"
] | 1
|
2022-02-24T22:12:13.000Z
|
2022-02-24T22:12:13.000Z
|
jupyterlab/jupyter_lab_config.py
|
rsundqvist/rics
|
c67ff6703facb3170535dcf173d7e55734cedbc6
|
[
"MIT"
] | 26
|
2022-02-24T21:08:51.000Z
|
2022-03-19T19:55:26.000Z
|
jupyterlab/jupyter_lab_config.py
|
rsundqvist/rics
|
c67ff6703facb3170535dcf173d7e55734cedbc6
|
[
"MIT"
] | null | null | null |
# flake8: noqa
# Disable all security
c.NotebookApp.token = ""
c.NotebookApp.password = ""
c.NotebookApp.open_browser = True
c.NotebookApp.ip = "localhost"
| 19.625
| 33
| 0.738854
| 20
| 157
| 5.75
| 0.7
| 0.417391
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007299
| 0.127389
| 157
| 7
| 34
| 22.428571
| 0.832117
| 0.210191
| 0
| 0
| 0
| 0
| 0.07438
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
7a75331c3aef16ed1041ebca969120eb6e028522
| 83
|
py
|
Python
|
caches/dummy.py
|
caser789/libcache
|
94027d5ac15af8c7289df34b5a3ab9548590d987
|
[
"MIT"
] | null | null | null |
caches/dummy.py
|
caser789/libcache
|
94027d5ac15af8c7289df34b5a3ab9548590d987
|
[
"MIT"
] | null | null | null |
caches/dummy.py
|
caser789/libcache
|
94027d5ac15af8c7289df34b5a3ab9548590d987
|
[
"MIT"
] | null | null | null |
from .base import Base
class Dummy(Base):
"""Dummy cache for testing
"""
| 11.857143
| 30
| 0.626506
| 11
| 83
| 4.727273
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.253012
| 83
| 6
| 31
| 13.833333
| 0.83871
| 0.277108
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7a7f3821ac6a40964e6e7173fad49eca11589328
| 30
|
py
|
Python
|
kalmann/__init__.py
|
Suhwan-Dev/kalmaNN
|
5ac85076a9b544ddb6dbe00cbf0bbf0727aca81e
|
[
"MIT"
] | 54
|
2017-05-12T21:52:56.000Z
|
2022-03-30T06:52:51.000Z
|
kalmann/__init__.py
|
glingi/kalmaNN
|
e191ed3b8d6492629b6c8432985e98300eb90ae3
|
[
"MIT"
] | 8
|
2017-04-29T03:38:07.000Z
|
2022-03-24T20:02:44.000Z
|
kalmann/__init__.py
|
glingi/kalmaNN
|
e191ed3b8d6492629b6c8432985e98300eb90ae3
|
[
"MIT"
] | 19
|
2017-04-29T03:36:29.000Z
|
2021-11-02T16:40:44.000Z
|
from knn import KNN, load_knn
| 15
| 29
| 0.8
| 6
| 30
| 3.833333
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 30
| 1
| 30
| 30
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7aaced7a5e7765017bc743680f5f300262d0556b
| 42
|
py
|
Python
|
Pushbullet/setup.py
|
Jwy-Leo/Tool
|
bc02a2c1b450d41a2505d61551e9959359d8640b
|
[
"MIT"
] | 5
|
2018-04-24T11:44:53.000Z
|
2020-01-02T05:58:30.000Z
|
Pushbullet/setup.py
|
Jwy-Leo/Tool
|
bc02a2c1b450d41a2505d61551e9959359d8640b
|
[
"MIT"
] | null | null | null |
Pushbullet/setup.py
|
Jwy-Leo/Tool
|
bc02a2c1b450d41a2505d61551e9959359d8640b
|
[
"MIT"
] | null | null | null |
import os
os.system('pip install pycurl')
| 14
| 31
| 0.761905
| 7
| 42
| 4.571429
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119048
| 42
| 2
| 32
| 21
| 0.864865
| 0
| 0
| 0
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
7ab7b442ad80a994153c22d9ec8f197f06408aa6
| 218
|
py
|
Python
|
pyrela/common_utils/__init__.py
|
facebookresearch/rela
|
0bd091576921b8f060b73159f8393a8e13aa6227
|
[
"MIT"
] | 93
|
2019-10-25T20:10:47.000Z
|
2022-03-14T15:28:57.000Z
|
pyrela/common_utils/__init__.py
|
facebookresearch/rela
|
0bd091576921b8f060b73159f8393a8e13aa6227
|
[
"MIT"
] | 1
|
2020-05-16T08:18:46.000Z
|
2020-05-16T08:18:46.000Z
|
pyrela/common_utils/__init__.py
|
facebookresearch/rela
|
0bd091576921b8f060b73159f8393a8e13aa6227
|
[
"MIT"
] | 10
|
2019-11-29T23:48:10.000Z
|
2022-03-24T07:42:22.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from .assert_utils import *
from .helper import *
from .logger import *
from .multi_counter import MultiCounter
from .stopwatch import Stopwatch
| 24.222222
| 70
| 0.784404
| 29
| 218
| 5.827586
| 0.689655
| 0.177515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151376
| 218
| 8
| 71
| 27.25
| 0.913514
| 0.311927
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8fdd0767a17a16319970e5ba9058986d5e6c9719
| 48
|
py
|
Python
|
pma_survey_hub/utils.py
|
joeflack4/PMA-Survey-Hub
|
9d134240f813143bda01aea97db760c47143d1ec
|
[
"MIT"
] | null | null | null |
pma_survey_hub/utils.py
|
joeflack4/PMA-Survey-Hub
|
9d134240f813143bda01aea97db760c47143d1ec
|
[
"MIT"
] | 52
|
2018-08-14T20:09:50.000Z
|
2019-04-24T18:31:20.000Z
|
pma_survey_hub/utils.py
|
PMA-2020/PMA-Survey-Hub
|
9d134240f813143bda01aea97db760c47143d1ec
|
[
"MIT"
] | null | null | null |
"""Assortment of utilities for application."""
| 16
| 46
| 0.729167
| 5
| 48
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 48
| 2
| 47
| 24
| 0.833333
| 0.833333
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8f564cc90d28bc5914fbc23aa41acc1d63cee82d
| 58
|
py
|
Python
|
src/model/backbone/__init__.py
|
hezl1592/deeplabv3-_mobilenetv2
|
cc3134d54bd84f700531c230353228541a7336f5
|
[
"MIT"
] | null | null | null |
src/model/backbone/__init__.py
|
hezl1592/deeplabv3-_mobilenetv2
|
cc3134d54bd84f700531c230353228541a7336f5
|
[
"MIT"
] | null | null | null |
src/model/backbone/__init__.py
|
hezl1592/deeplabv3-_mobilenetv2
|
cc3134d54bd84f700531c230353228541a7336f5
|
[
"MIT"
] | null | null | null |
from .mobilenetv2 import MobileNetV2, MobileNetV2_2Feature
| 58
| 58
| 0.896552
| 6
| 58
| 8.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.074074
| 0.068966
| 58
| 1
| 58
| 58
| 0.87037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8f58c5758324b7b2a3da27a5eec942f6cd5b6fc7
| 143
|
py
|
Python
|
kapre/__init__.py
|
timgates42/kapre
|
e0fffbbd2f9a8d1bcc4d337d15389d059646b2a8
|
[
"MIT"
] | null | null | null |
kapre/__init__.py
|
timgates42/kapre
|
e0fffbbd2f9a8d1bcc4d337d15389d059646b2a8
|
[
"MIT"
] | null | null | null |
kapre/__init__.py
|
timgates42/kapre
|
e0fffbbd2f9a8d1bcc4d337d15389d059646b2a8
|
[
"MIT"
] | null | null | null |
__version__ = '0.3.2'
VERSION = __version__
from . import composed
from . import backend
from .signal import *
from .time_frequency import *
| 15.888889
| 29
| 0.748252
| 19
| 143
| 5.157895
| 0.578947
| 0.204082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02521
| 0.167832
| 143
| 8
| 30
| 17.875
| 0.798319
| 0
| 0
| 0
| 0
| 0
| 0.034965
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8f626d8fc7a9a17ec77d6dc62ee74b9e4074b630
| 755
|
py
|
Python
|
manga_py/providers/mangakakalot_com.py
|
paulolimac/manga-py
|
3d180846750a4e770b5024eb8cd15629362875b1
|
[
"MIT"
] | null | null | null |
manga_py/providers/mangakakalot_com.py
|
paulolimac/manga-py
|
3d180846750a4e770b5024eb8cd15629362875b1
|
[
"MIT"
] | null | null | null |
manga_py/providers/mangakakalot_com.py
|
paulolimac/manga-py
|
3d180846750a4e770b5024eb8cd15629362875b1
|
[
"MIT"
] | null | null | null |
from manga_py.provider import Provider
from .helpers.std import Std
class MangaKakalotCom(Provider, Std):
def get_chapter_index(self) -> str:
re = self.re.search('/chapter_([^/]+)', self.chapter)
return re.group(1).replace('.', '-', 2)
def get_main_content(self):
return self._get_content('{}/manga/{}')
def get_manga_name(self) -> str:
return self._get_name('/(?:manga|chapter)/([^/]+)/?')
def get_chapters(self):
return self._elements('.chapter-list span a')
def get_files(self):
result = self.html_fromstring(self.chapter, '#vungdoc img')
return [i.get('src') for i in result]
def book_meta(self) -> dict:
# todo meta
pass
main = MangaKakalotCom
| 25.166667
| 67
| 0.618543
| 97
| 755
| 4.639175
| 0.463918
| 0.066667
| 0.062222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003413
| 0.223841
| 755
| 29
| 68
| 26.034483
| 0.764505
| 0.011921
| 0
| 0
| 0
| 0
| 0.123656
| 0.037634
| 0
| 0
| 0
| 0.034483
| 0
| 1
| 0.333333
| false
| 0.055556
| 0.111111
| 0.166667
| 0.777778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 5
|
8f6a7a77fbbf259a77b3c9e9c8a9675c6ae23381
| 39
|
py
|
Python
|
exchange-calendar/__init__.py
|
ardrahan/home-assistant-exchange-calendar
|
4aed3f9d0a060f28603a1b73b50618272f8159d2
|
[
"MIT"
] | 1
|
2022-01-23T11:22:39.000Z
|
2022-01-23T11:22:39.000Z
|
exchange-calendar/__init__.py
|
ardrahan/home-assistant-exchange-calendar
|
4aed3f9d0a060f28603a1b73b50618272f8159d2
|
[
"MIT"
] | 1
|
2022-01-24T22:11:05.000Z
|
2022-01-27T22:22:44.000Z
|
exchange-calendar/__init__.py
|
ardrahan/home-assistant-exchange-calendar
|
4aed3f9d0a060f28603a1b73b50618272f8159d2
|
[
"MIT"
] | 1
|
2021-10-02T21:09:00.000Z
|
2021-10-02T21:09:00.000Z
|
"""The exchange_calendar component."""
| 19.5
| 38
| 0.74359
| 4
| 39
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 39
| 1
| 39
| 39
| 0.777778
| 0.820513
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8f71078f8db74f286384458bace8e4a29f717909
| 62
|
py
|
Python
|
alert_stream_crossmatch/__init__.py
|
dirac-institute/alert_stream_crossmatch
|
84b88fc8b9590c01bcd98c8229f7925081f65619
|
[
"BSD-3-Clause"
] | null | null | null |
alert_stream_crossmatch/__init__.py
|
dirac-institute/alert_stream_crossmatch
|
84b88fc8b9590c01bcd98c8229f7925081f65619
|
[
"BSD-3-Clause"
] | 5
|
2021-07-19T21:38:19.000Z
|
2022-03-31T23:12:32.000Z
|
alert_stream_crossmatch/__init__.py
|
dirac-institute/alert_stream_crossmatch
|
84b88fc8b9590c01bcd98c8229f7925081f65619
|
[
"BSD-3-Clause"
] | null | null | null |
from .ztf_rosat_crossmatch import *
from .constants import *
| 15.5
| 35
| 0.790323
| 8
| 62
| 5.875
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145161
| 62
| 3
| 36
| 20.666667
| 0.886792
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
56d454f0ac48108e9f74ae7092cfa7dacda7d500
| 117
|
py
|
Python
|
agrirouter/constants/media_types.py
|
DKE-Data/agrirouter-sdk-python
|
6d6b26606f7d424c62289af56da55acf412772fc
|
[
"Apache-2.0"
] | null | null | null |
agrirouter/constants/media_types.py
|
DKE-Data/agrirouter-sdk-python
|
6d6b26606f7d424c62289af56da55acf412772fc
|
[
"Apache-2.0"
] | null | null | null |
agrirouter/constants/media_types.py
|
DKE-Data/agrirouter-sdk-python
|
6d6b26606f7d424c62289af56da55acf412772fc
|
[
"Apache-2.0"
] | null | null | null |
from agrirouter.auth.enums import BaseEnum
class ContentTypes(BaseEnum):
APPLICATION_JSON = "application/json"
| 19.5
| 42
| 0.794872
| 13
| 117
| 7.076923
| 0.769231
| 0.326087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128205
| 117
| 5
| 43
| 23.4
| 0.901961
| 0
| 0
| 0
| 0
| 0
| 0.136752
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
56db50eb470859a308f042e108bd95443a07d292
| 75
|
py
|
Python
|
discordEasy/utils/__init__.py
|
LeConstellationniste/DiscordFramework
|
24d4b9b7cb0a21d3cec9d5362ab0828c5e15a3af
|
[
"CC0-1.0"
] | 1
|
2021-01-27T14:55:03.000Z
|
2021-01-27T14:55:03.000Z
|
discordEasy/utils/__init__.py
|
LeConstellationniste/DiscordFramework
|
24d4b9b7cb0a21d3cec9d5362ab0828c5e15a3af
|
[
"CC0-1.0"
] | null | null | null |
discordEasy/utils/__init__.py
|
LeConstellationniste/DiscordFramework
|
24d4b9b7cb0a21d3cec9d5362ab0828c5e15a3af
|
[
"CC0-1.0"
] | null | null | null |
from .logs import Logs
from .strUtils import *
from .discordUtils import *
| 18.75
| 27
| 0.773333
| 10
| 75
| 5.8
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16
| 75
| 3
| 28
| 25
| 0.920635
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
56f8dc279d9189efef202206ed60927a96162164
| 44
|
py
|
Python
|
spotty/errors/nothing_to_do.py
|
greglira/spotty
|
0b5073621ba8e19be75b6f9701e6c9971b6d17fb
|
[
"MIT"
] | 246
|
2018-09-03T09:09:48.000Z
|
2020-07-18T21:07:15.000Z
|
spotty/errors/nothing_to_do.py
|
greglira/spotty
|
0b5073621ba8e19be75b6f9701e6c9971b6d17fb
|
[
"MIT"
] | 42
|
2018-10-09T19:41:56.000Z
|
2020-06-15T22:55:58.000Z
|
spotty/errors/nothing_to_do.py
|
greglira/spotty
|
0b5073621ba8e19be75b6f9701e6c9971b6d17fb
|
[
"MIT"
] | 27
|
2018-10-09T22:16:40.000Z
|
2020-06-08T22:26:00.000Z
|
class NothingToDoError(Exception):
pass
| 14.666667
| 34
| 0.772727
| 4
| 44
| 8.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159091
| 44
| 2
| 35
| 22
| 0.918919
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
7113567a175311365a037fd11e6324c0e03ec0ed
| 129
|
py
|
Python
|
algorithms/989. Add to Array-Form of Integer.py
|
vuzway9132/leetcode
|
e51a9ce7a6bb3e35c0fcb8c8f4f6cd5763708dbf
|
[
"MIT"
] | 1
|
2020-12-02T13:54:30.000Z
|
2020-12-02T13:54:30.000Z
|
algorithms/989. Add to Array-Form of Integer.py
|
vuzway9132/leetcode
|
e51a9ce7a6bb3e35c0fcb8c8f4f6cd5763708dbf
|
[
"MIT"
] | null | null | null |
algorithms/989. Add to Array-Form of Integer.py
|
vuzway9132/leetcode
|
e51a9ce7a6bb3e35c0fcb8c8f4f6cd5763708dbf
|
[
"MIT"
] | null | null | null |
class Solution:
def addToArrayForm(self, A: List[int], K: int) -> List[int]:
return list(str(int(''.join(map(str, A)))+K))
| 32.25
| 62
| 0.627907
| 21
| 129
| 3.857143
| 0.619048
| 0.17284
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147287
| 129
| 3
| 63
| 43
| 0.736364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
7137ad9a930644f21636087dd83262fd3310e7ab
| 206
|
py
|
Python
|
50/main3.py
|
xiaomiwujiecao/effectivePythonNote
|
772e81864b171dcc19b6bcb1b31fc61bcfa1b9fe
|
[
"MIT"
] | null | null | null |
50/main3.py
|
xiaomiwujiecao/effectivePythonNote
|
772e81864b171dcc19b6bcb1b31fc61bcfa1b9fe
|
[
"MIT"
] | null | null | null |
50/main3.py
|
xiaomiwujiecao/effectivePythonNote
|
772e81864b171dcc19b6bcb1b31fc61bcfa1b9fe
|
[
"MIT"
] | null | null | null |
from analysis.utils import inspect as analysis_inspect
from frontend.utils import inspect as frontend_inspect
value =33
if analysis_inspect(value)==frontend_inspect(value):
print('Inspection equal')
| 17.166667
| 54
| 0.81068
| 28
| 206
| 5.821429
| 0.464286
| 0.220859
| 0.220859
| 0.245399
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01105
| 0.121359
| 206
| 11
| 55
| 18.727273
| 0.889503
| 0
| 0
| 0
| 0
| 0
| 0.078818
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0.2
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8534845c875781fd37b40a4e57a767bbadf63e35
| 244
|
py
|
Python
|
the_artificial_experience/pipeline/syn_env_wrapper.py
|
JacobFV/artificial-experience
|
8f3f1b5b2ad40f1d2e8bdda879d7c5c2b75d37dd
|
[
"MIT"
] | 1
|
2022-03-05T02:44:27.000Z
|
2022-03-05T02:44:27.000Z
|
the_artificial_experience/pipeline/syn_env_wrapper.py
|
JacobFV/artificial-experience
|
8f3f1b5b2ad40f1d2e8bdda879d7c5c2b75d37dd
|
[
"MIT"
] | null | null | null |
the_artificial_experience/pipeline/syn_env_wrapper.py
|
JacobFV/artificial-experience
|
8f3f1b5b2ad40f1d2e8bdda879d7c5c2b75d37dd
|
[
"MIT"
] | null | null | null |
from base_env import Env
class SynEnv(Env):
class EnvTransitionFunctions:
@staticmethod
def eachStep(curr_env, all_envs):
pass
@staticmethod
def whenDone(curr_env, all_envs):
pass
| 17.428571
| 41
| 0.606557
| 26
| 244
| 5.5
| 0.576923
| 0.111888
| 0.13986
| 0.195804
| 0.251748
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.336066
| 244
| 13
| 42
| 18.769231
| 0.882716
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0.222222
| 0.111111
| 0
| 0.555556
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
857bd12c41a4ce7d2f870f462f289bf2e9f77bb6
| 23
|
py
|
Python
|
examples/libtest/imports/cls1.py
|
allbuttonspressed/pyjs
|
c726fdead530eb63ee4763ae15daaa58d84cd58f
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
examples/libtest/imports/cls1.py
|
allbuttonspressed/pyjs
|
c726fdead530eb63ee4763ae15daaa58d84cd58f
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
examples/libtest/imports/cls1.py
|
allbuttonspressed/pyjs
|
c726fdead530eb63ee4763ae15daaa58d84cd58f
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2019-11-18T14:17:59.000Z
|
2019-11-18T14:17:59.000Z
|
from .cls import CLS
| 5.75
| 20
| 0.695652
| 4
| 23
| 4
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.26087
| 23
| 3
| 21
| 7.666667
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
85af258ed855c98a7bd357cdfcec35d702a9cf3b
| 304
|
py
|
Python
|
test/fixtures/sequence_fixtures.py
|
undeadpixel/mallet
|
86ca17b72e7a24bc898a4a49076609dfd38a7d2c
|
[
"MIT"
] | 1
|
2016-07-15T13:03:13.000Z
|
2016-07-15T13:03:13.000Z
|
test/fixtures/sequence_fixtures.py
|
undeadpixel/mallet
|
86ca17b72e7a24bc898a4a49076609dfd38a7d2c
|
[
"MIT"
] | null | null | null |
test/fixtures/sequence_fixtures.py
|
undeadpixel/mallet
|
86ca17b72e7a24bc898a4a49076609dfd38a7d2c
|
[
"MIT"
] | null | null | null |
import mallet.sequence as seq
def sequences():
return [
seq.Sequence("S1", "ABCABCABC"),
seq.Sequence("S2", "AAAAA"),
seq.Sequence("S3", "CBCBCBCBC")
]
def long_sequences():
return [
seq.Sequence("L1", "A"*800),
seq.Sequence("L2", "ABCA"*123),
]
| 20.266667
| 40
| 0.542763
| 34
| 304
| 4.823529
| 0.617647
| 0.335366
| 0.219512
| 0.317073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04955
| 0.269737
| 304
| 14
| 41
| 21.714286
| 0.689189
| 0
| 0
| 0.166667
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0
| 0.083333
| 0.166667
| 0.416667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
85c3764e499ad6b7122c591c720573026d7b71e0
| 12,479
|
py
|
Python
|
tests/marc_records.py
|
seanredmond/py-marc-data
|
c13f44886a60822196374ea9bb62fe595416461a
|
[
"MIT"
] | 1
|
2021-01-05T05:25:06.000Z
|
2021-01-05T05:25:06.000Z
|
tests/marc_records.py
|
seanredmond/py-marc-data
|
c13f44886a60822196374ea9bb62fe595416461a
|
[
"MIT"
] | null | null | null |
tests/marc_records.py
|
seanredmond/py-marc-data
|
c13f44886a60822196374ea9bb62fe595416461a
|
[
"MIT"
] | null | null | null |
REC1 = b"""cam a22002051 4500001001300000003000400013005001700017008004100034010001700075035001900092040001800111050001600129100003500145245017600180260004300356300001900399500002600418650002100444650004900465 00000002 DLC20040505165105.0800108s1899 ilu 000 0 eng a 00000002 a(OCoLC)5853149 aDLCcDSIdDLC00aRX671b.A921 aAurand, Samuel Herbert,d1854-10aBotanical materia medica and pharmacology;bdrugs considered from a botanical, pharmaceutical, physiological, therapeutical and toxicological standpoint.cBy S. H. Aurand. aChicago,bP. H. Mallen Company,c1899. a406 p.c24 cm. aHomeopathic formulae. 0aBotany, Medical. 0aHomeopathyxMateria medica and therapeutics."""
REC_MX = b"""cpc a2200733 a 4500001001300000003000400013005001700017007000700034007001000041007000700051007000700058008004100065010001700106040002400123043001200147050001700159100005500176245003400231246004300265246005400308260001000362300006000372300006300432300005900495300006000554300004500614300006100659351073000720520069901450520033602149524010102485540008002586545032702666500012702993500007303120541011503193650005103308650004703359650005303406650006003459650006903519650005803588650004803646650006103694650006003755650005703815650005803872650006003930650006403990650005904054650006804113650006904181650006404250655004204314655004404356655003904400655004504439655005304484655004504537655005004582655005004632852009104682856010804773 00650024 DLC20101209130102.0kh|bo|gt|cj||s kg|b||kh|c||000724i19802005xxu eng a 00650024 aDLCcDLCdDLCegihc an-us-dc00aGuide Record1 aHighsmith, Carol M.,d1946-ephotographer,edonor.10aHighsmith archiveh[graphic].33aCarol M. Highsmith archiveh[graphic].33aCarol M. Highsmith photograph archiveh[graphic]. c1980- a276 photographic prints :bgelatin silver ;c8 x 10 in. a241 photographic prints :bcibachrome, color ;c8 x 10 in. a824 transparencies :bfilm, color ;cchiefly 4 x 5 in. a1213 negatives :bsafety film, b&w ;cchiefly 4 x 5 in. a2 photographs (digital prints) :bcolor. a11,271 photographs :bdigital files, TIFF, mostly color. aFilm negatives and transparencies are organized by type of film and size in the following filing series: LC-HS503 (4x5 color transparencies), LC-HS505 (120 color slides), LC-HS507 (120 color slides, mounted), LC-HS513 (4x5 negatives), LC-HS543 (4x5 color negatvies) and LC-HS545 (120 color negatives) . Within each filing series negatives and transparencies are arranged numerically by a one-up number assigned by the photographer. Photographic prints or digital scans serve as reference copies for the negatives and transparencies and are organized by project or subject into groups with the call number designation LOT. 
Each LOT is cataloged separately and linked through the collection title: Carol M. Highsmith archive.0 aThe archive consists primarily of photographs documenting buildings, urban renewal efforts, and historic preservation. Many of the photographs document the Washington, D.C. area. Projects for the General Services Administration (GSA) show US government buildings through the United States. Projects for the Urban Land Institute document urban settings such as San Antonio, Texas and the greater Los Angeles, California region. Also included are photographs of President Ronald Reagan meeting with Republican Senatorial candidates and photographs of Lexington, Virginia. In addition, there are two photographs taken near the crash site of United Airlines Flight 93 in Shanksville, Pennsylvania.0 aIn 2007, the photographer began to add born digital photographs to the archive, beginning with a large project documenting the Library of Congress buildings; continuing the GSA building documentation; and in 2009 launching the Carol M. Highsmith's America project to document each state in the United States, starting with Alabama.8 aPublished images must bear the credit line: The Library of Congress, Carol M. Highsmith Archive. aNo known restrictions on publication. Photographs are in the public domain. aDistinguished architectural photographer, based in Washington, D.C., Highsmith documents architecture and architectural renovation projects in the nation's capitol and throughout the United States. She bases her career on the work of noted documentary and architectural photographer Frances Benjamin Johnston (1864-1952). aThis archive is open-ended; future gifts are expected. The catalog record will be updated as new accessions are processed. aCollection includes Highsmith's captions which accompany the images. cGift;aCarol M. Highsmith;d1992, 1994, 2002;e(DLC/PP-1992:189, DLC/PP-1994:020, DLC/PP-2002:038), and later. 
7aArchitecturezUnited Statesy1980-2010.2lctgm 7aChurcheszUnited Statesy1980-2010.2lctgm 7aCities & townszUnited Statesy1980-2010.2lctgm 7aCommercial facilitieszUnited Statesy1980-2010.2lctgm 7aConservation & restorationzWashington (D.C.)y1980-2000.2lctgm 7aCultural facilitieszUnited Statesy1980-2010.2lctgm 7aDwellingszUnited Statesy1980-2010.2lctgm 7aEducational facilitieszUnited Statesy1980-2010.2lctgm 7aGovernment facilitieszUnited Statesy1980-2010.2lctgm 7aHistoric buildingszUnited Statesy1980-2010.2lctgm 7aMilitary facilitieszUnited Statesy1980-2010.2lctgm 7aMonuments & memorialszUnited Statesy1980-2010.2lctgm 7aSocial & civic facilitieszUnited Statesy1980-2010.2lctgm 7aParades & processionszUnited Statesy1980-20102lctgm 7aPresidents & the CongresszWashington (D.C.)y1980-1990.2lctgm 7aSports & recreation facilitieszUnited Statesy1980-2010.2lctgm 7aTransportation facilitieszUnited Statesy1980-2010.2lctgm 7aAerial photographsy1980-2010.2gmgpc 7aPortrait photographsy1980-1990.2gmgpc 7aGroup portraitsy1980-1990.2gmgpc 7aGelatin silver printsy1980-2000.2gmgpc 7aDye destruction printsxColory1980-2000.2gmgpc 7aSafety film negativesy1980-2010.2gmgpc 7aFilm transparenciesxColory1980-2010.2gmgpc 7aDigital photographsxColory2000-2010.2gmgpc aLibrary of CongressbPrints and Photographs DivisioneWashington, D.C., 20540 USAndcu41zSearch for Highsmith items in Prints & Photographs Online Cataloguhttp://hdl.loc.gov/loc.pnp/pp.highsm"""
REC_MP = b"""cem a22002531 4500001001300000003000400013005001700017007000900034008004100043010001700084035002000101040002600121043001200147050002300159100003500182245017700217255001900394260004600413300003200459505012300491651002800614650005100642710002300693 00004452 DLC20121013005559.0ad canzn940812m18981906pau e eng a 00004452 a(OCoLC)30935992 aDLCcPPiHidPPiDdDLC an-us-pa00aG1264.P6bH62 18981 aHopkins, Griffith Morgan,cJr.10aReal estate plat-book of the city of Pittsburgh :bfrom official records, private plans and actual surveys /cconstructed under the direction and published by G.M. Hopkins. aScales differ. aPhiladelphia :bG.M. Hopkins,c1898-1906. a4 v. :bcol. maps ;c59 cm.0 av.1. 13th-14th, 22nd-23rd wards -- v. 2. 18th-21st, 37th wards -- v. 3 6th-12th, 15th-17th wards -- v. 4. 1st-5th, 7th 0aPittsburgh (Pa.)vMaps. 0aReal propertyzPennsylvaniazPittsburghvMaps.2 aG.M. Hopkins & Co."""
REC_CF = b"""cmm a2200349 a 4500001001300000003000400013005001700017007001500034008004100049010001700090020001500107040001300122050001000135082001400145245013600159256003100295260007500326300004100401490005900442538024400501538020300745500002700948521004300975520012701018650002001145650001701165700002401182700002501206700002201231710004901253830007701302 00021631 DLC20030624102241.0co |||||||||||000110s2000 ohu f m eng a 00021631 a1571170383 aDLCcDLC00aTA16510a620.121300aLeak testing CD-ROMh[computer file] /ctechnical editors, Charles N. Jackson, Jr., Charles N. Sherlock ; editor, Patrick O. Moore. aComputer data and program. aColumbus, Ohio :bAmerican Society for Nondestructive Testing,cc2000. a1 computer optical disc ;c4 3/4 in.1 aNondestructive testing handbook. Third edition ;vv. 1 aSystem requirements for Windows: i486 or Pentium processor-based PC; 8MB RAM on Windows 95 or 98 (16MB recommended); 16MB RAM on Windows NT (24MB recommended); Microsoft Windows 95, 98, or NT 4.0 with Service Pack 3 or later; CD-ROM drive. aSystem requirements for Macintosh: Apple Power Macintosh ; 4.5MB RAM available to Acrobat Reader (6.5 recommended); Apple System Software 7.1.2 or later; 8MB available hard disk space; CD-ROM drive. aTitle from disc label. aQuality control engineers, inspectors. aTheory and application of nondestructive tests for characterization and inspection of industrial materials and components. 0aLeak detectors. 0aGas leakage.1 aJackson, Charles N.1 aSherlock, Charles N.1 aMoore, Patrick O.2 aAmerican Society for Nondestructive Testing. 0aNondestructive testing handbook (3rd ed. : Electronic resource) ;vv. 1."""
REC_MU = b"""cjm a22002771a 4500001001300000003000400013005001700017007001500034008004100049050001400090010001700104020001100121024001700132028003400149035002300183040001800206042001300224100002400237245005000261260004700311300004100358500001800399511006100417505038100478650001600859 00000838 DLC20030506181700.0sd|zsngnnmmned000824s1998 nyuppn d00aSDA 16949 a 00000838 c$15.981 a60121531262102aUPTD 53126bUniversal Records a(OCoLC)ocm39655785 aOCOcOCOdDLC alcderive0 aMcGruffc(Musician)10aDestined to beh[sound recording] /cMcGruff. aNew York, NY :bUniversal Records,cp1998. a1 sound disc :bdigital ;c4 3/4 in. aCompact disc.0 aRap music performed by McGruff ; with accomp. musicians.0 aGruff express -- Harlem kidz get biz -- This is how we do -- Many know -- Exquisite -- The spot (interlude) -- What part of the game -- Who holds his own -- What cha doin' to me -- Destined to be -- Freestyle -- Dangerzone -- What you want -- Before we start -- Reppin' uptown (featuring The Lox) -- The signing (interlude) -- Stop it -- Before we start (remix) (bonus track). 0aRap (Music)"""
REC_CR = b"""nas a2200157 a 4500001001300000003000400013005001700017008004100034010001700075035002000092040001300112042000700125245008500132260007400217500012100291 00000000 DLC19940915171908.0940906u19949999dcuuu 0 0eng a 00000000 a(OCoLC)31054590 aDLCcDLC alc00aOnline test record /cJohn D. Levy, Serial Record Division, Library of Congress. aWashington, DC :bLibrary of Congress, Serial Record Division,c1994- aThis record provides a marker in the LC MUMS ONUM index that will prevent other records with this LCCN from loading."""
REC_VM = b"""cgm a22001817a 4500001001300000003000400013005001700017007002400034008004100058010001700099040001300116050003000129245008400159260004000243300005900283541008300342710005200425 00270273 DLC20000412151520.0m 991028s xxu v|eng a 00270273 aDLCcDLC00aCGC 9600-9605 (ref print)00aAfflictionh[motion picture] /cLargo Entertainment; directed by Paul Schrader. aU.S. :bLargo Entertainment,c1997. a12r of 12 on 6 reels :bsd., col. ;c35 mm. ref print. dReceived: 3/24/00;3ref print;ccopyright deposit--407;aCopyright Collection.2 aCopyright Collection (Library of Congress)5DLC"""
REC_02 = b"""cem a22002893a 4500001001300000003000400013005001700017007000900034007000700043008004100050010003000091017003400121034001400155040001300169050002400182052000900206110003000215245003700245255002700282260001900309300003400328530007900362651002300441752001700464852008600481856010100567 00552205DLC20000621074203.0aj|canzncr||||000313s1915 xx d 0 eng a 00552205z 99446781 aF27747bU.S. Copyright Office0 aab140000 aDLCcDLC00aG4970 1915b.R3 TIL a49702 aRand McNally and Company.00aMap of the island of Porto Rico. aScale [ca. 1:140,000]. a[S.l.],c1915. a1 map :bcol. ;c71 x 160 cm. aAvailable also through the Library of Congress web site as a raster image. 0aPuerto RicovMaps. aPuerto Rico.0 aLibrary of CongressbGeography and Map DivisioneWashington, D.C. 20540-4650ndcu7 dg4970fct000492gurn:hdl:loc.gmd/g4970.ct000492uhttp://hdl.loc.gov/loc.gmd/g4970.ct0004922http"""
| 623.95
| 5,626
| 0.793894
| 1,329
| 12,479
| 7.44921
| 0.572611
| 0.021212
| 0.029697
| 0.030707
| 0.009293
| 0
| 0
| 0
| 0
| 0
| 0
| 0.326881
| 0.155461
| 12,479
| 19
| 5,627
| 656.789474
| 0.612487
| 0
| 0
| 0
| 0
| 1
| 0.988697
| 0.18493
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a430cedf3108c9cdc0d9dab15dfe6761c28981d4
| 169
|
py
|
Python
|
irrhound/shared/__init__.py
|
stucchimax/IRRHound
|
998cee9574e75efe0decfbad912704274ce215df
|
[
"CC0-1.0"
] | 4
|
2021-10-05T12:41:31.000Z
|
2022-02-01T12:22:29.000Z
|
irrhound/shared/__init__.py
|
stucchimax/IRRHound
|
998cee9574e75efe0decfbad912704274ce215df
|
[
"CC0-1.0"
] | 2
|
2021-12-07T20:29:24.000Z
|
2021-12-13T15:52:18.000Z
|
irrhound/shared/__init__.py
|
stucchimax/IRRHound
|
998cee9574e75efe0decfbad912704274ce215df
|
[
"CC0-1.0"
] | 4
|
2021-10-04T07:37:41.000Z
|
2021-12-11T01:48:26.000Z
|
from .peer import Peer
from .irr_source_list import IRRSourceList
from .whois_proxy import WhoisProxy
from .route_object import RouteObject
from .irr_scan import IRRScan
| 33.8
| 42
| 0.857988
| 25
| 169
| 5.6
| 0.6
| 0.1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112426
| 169
| 5
| 43
| 33.8
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a48f67636476362c1f6a2e660dcc39a23b1df55a
| 227
|
py
|
Python
|
gooey/gui/util/taskkill.py
|
mayk93/Gooey
|
4b69ffac883e7928737f4d5c21ab14c390f9c090
|
[
"MIT"
] | 2
|
2018-10-10T14:58:44.000Z
|
2021-04-30T11:50:15.000Z
|
gooey/gui/util/taskkill.py
|
bastula/Gooey
|
65aeba7abc33fbed9a8ad991d1831c32d243d39a
|
[
"MIT"
] | null | null | null |
gooey/gui/util/taskkill.py
|
bastula/Gooey
|
65aeba7abc33fbed9a8ad991d1831c32d243d39a
|
[
"MIT"
] | null | null | null |
import sys
import os
import signal
import subprocess

if sys.platform.startswith("win"):
    def taskkill(pid):
        """Forcefully terminate process *pid* and its child tree (Windows).

        Output of the ``taskkill`` utility is discarded, and a non-zero exit
        status (e.g. the process already exited) is ignored -- matching the
        original best-effort behaviour.
        """
        # An argument list via subprocess avoids building a shell command
        # string (the previous os.system call interpolated pid into one).
        subprocess.call(
            ['taskkill', '/F', '/PID', '{:d}'.format(pid), '/T'],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
else:  # POSIX
    def taskkill(pid):
        """Send SIGTERM to process *pid* (POSIX).

        Note: unlike the Windows variant this does not terminate the child
        process tree, and it raises ProcessLookupError if *pid* is gone.
        """
        os.kill(pid, signal.SIGTERM)
| 18.916667
| 64
| 0.669604
| 36
| 227
| 4.222222
| 0.611111
| 0.144737
| 0.184211
| 0.210526
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005319
| 0.171806
| 227
| 11
| 65
| 20.636364
| 0.803191
| 0.022026
| 0
| 0.222222
| 0
| 0
| 0.172727
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.333333
| 0
| 0.555556
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8ef7de7687fd3ff58ee6f7605a59c10d9416e5da
| 23
|
py
|
Python
|
flag.py
|
OpenSauce04/automaton.py
|
3d9c97b6e08009c5f8bd106adb664dacf50bb147
|
[
"Unlicense"
] | 1
|
2020-12-04T11:03:32.000Z
|
2020-12-04T11:03:32.000Z
|
flag.py
|
OpenSauce04/automaton.py
|
3d9c97b6e08009c5f8bd106adb664dacf50bb147
|
[
"Unlicense"
] | null | null | null |
flag.py
|
OpenSauce04/automaton.py
|
3d9c97b6e08009c5f8bd106adb664dacf50bb147
|
[
"Unlicense"
] | null | null | null |
# Program flag pointer
| 11.5
| 22
| 0.782609
| 3
| 23
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173913
| 23
| 1
| 23
| 23
| 0.947368
| 0.869565
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
f116cf70d820c6c635342809554867ece1ff31f3
| 202
|
py
|
Python
|
tests/samples/invalid/FunctionAchievement.py
|
raviolliii/dev-achievements
|
da0a92f0d53b20192ef68ee1222cbf01fb0c0ed0
|
[
"MIT"
] | 12
|
2021-02-05T15:38:08.000Z
|
2021-09-04T21:19:03.000Z
|
tests/samples/invalid/FunctionAchievement.py
|
raviolliii/dev-achievements
|
da0a92f0d53b20192ef68ee1222cbf01fb0c0ed0
|
[
"MIT"
] | null | null | null |
tests/samples/invalid/FunctionAchievement.py
|
raviolliii/dev-achievements
|
da0a92f0d53b20192ef68ee1222cbf01fb0c0ed0
|
[
"MIT"
] | null | null | null |
# cases where FunctionAchievement should not unlock
# NOTE(review): each "# >> CASE" marker below starts an independent sample
# parsed by the test harness -- keep the markers byte-identical.
# >> CASE
def test():
    pass
# >> CASE
def func():
    pass
func
# >> CASE
def func():
    pass
f = func
f()
# >> CASE
func()
# >> CASE
func
| 8.08
| 51
| 0.569307
| 26
| 202
| 4.423077
| 0.461538
| 0.182609
| 0.191304
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.282178
| 202
| 24
| 52
| 8.416667
| 0.793103
| 0.440594
| 0
| 0.636364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0.272727
| 0
| 0
| 0.272727
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
f14ec7e35f5f1d3c29aaa55c0eac105d6781f94a
| 314
|
py
|
Python
|
prog_template.py
|
jonasyip/machineLearningLibrary
|
1809717ff8e5c4a1b5fd3b7cb08e4b179e5fc14a
|
[
"MIT"
] | null | null | null |
prog_template.py
|
jonasyip/machineLearningLibrary
|
1809717ff8e5c4a1b5fd3b7cb08e4b179e5fc14a
|
[
"MIT"
] | null | null | null |
prog_template.py
|
jonasyip/machineLearningLibrary
|
1809717ff8e5c4a1b5fd3b7cb08e4b179e5fc14a
|
[
"MIT"
] | null | null | null |
# Program skeleton: fill in the marked sections, then run this file directly.
# ---------- your imports start here
import random
# ---------- your functions start here
def foo(x):
    """Placeholder function -- replace body (and this docstring) with real code."""
    # YOUR CODE HERE
    pass
# ---------- your testing functions start here
def test_foo():
    """Placeholder test for foo() -- add assertions here."""
    # YOUR CODE HERE
    pass
# Entry point guard: tests run only when executed directly, not on import.
if __name__ == "__main__":
    # INVOKE YOUR TEST FUNCTION(S) HERE
    test_foo()
| 14.952381
| 46
| 0.592357
| 40
| 314
| 4.4
| 0.5
| 0.153409
| 0.204545
| 0.238636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.245223
| 314
| 21
| 47
| 14.952381
| 0.742616
| 0.573248
| 0
| 0.285714
| 0
| 0
| 0.0625
| 0
| 0
| 0
| 0
| 0.047619
| 0
| 1
| 0.285714
| false
| 0.285714
| 0.142857
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
f16f4a41409b76490433b383351da2306babd4c3
| 46
|
py
|
Python
|
tests/__init__.py
|
lucasvieirasilva/aws-codeartifact-poetry
|
ad224f8e403373bc6336d362ce11ba84c2bad616
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
lucasvieirasilva/aws-codeartifact-poetry
|
ad224f8e403373bc6336d362ce11ba84c2bad616
|
[
"MIT"
] | null | null | null |
tests/__init__.py
|
lucasvieirasilva/aws-codeartifact-poetry
|
ad224f8e403373bc6336d362ce11ba84c2bad616
|
[
"MIT"
] | null | null | null |
"""AWS CodeArtifact Poetry CLI unit tests."""
| 23
| 45
| 0.717391
| 6
| 46
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.130435
| 46
| 1
| 46
| 46
| 0.825
| 0.847826
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
74c199211708134b278918af5c9536577e49ca76
| 30
|
py
|
Python
|
.idea/VirtualEnvironment/Lib/site-packages/tests/outcomes/syntax_error/test_error_using_eval_and_print/main.py
|
Vladpetr/NewsPortal
|
cd4127fbc09d9c8f5e65c8ae699856c6d380a320
|
[
"Apache-2.0"
] | null | null | null |
.idea/VirtualEnvironment/Lib/site-packages/tests/outcomes/syntax_error/test_error_using_eval_and_print/main.py
|
Vladpetr/NewsPortal
|
cd4127fbc09d9c8f5e65c8ae699856c6d380a320
|
[
"Apache-2.0"
] | 5
|
2021-04-08T22:02:15.000Z
|
2022-02-10T14:53:45.000Z
|
.idea/VirtualEnvironment/Lib/site-packages/tests/outcomes/syntax_error/test_error_using_eval_and_print/main.py
|
Vladpetr/NewsPortal
|
cd4127fbc09d9c8f5e65c8ae699856c6d380a320
|
[
"Apache-2.0"
] | null | null | null |
# Test fixture: deliberately evaluates invalid syntax -- the second print
# raises SyntaxError inside eval() at runtime. Do not "fix" it.
print("123")
print(eval(")"))
| 10
| 16
| 0.566667
| 4
| 30
| 4.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107143
| 0.066667
| 30
| 2
| 17
| 15
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
74ef771659a315d4902505a7381d64512e02845f
| 131
|
py
|
Python
|
inferencer/inferencer.py
|
AlienX456/AM-inferenciatorKafka
|
c92e5050e77ac31ec142a17e219fe51b8dbe7da0
|
[
"MIT"
] | null | null | null |
inferencer/inferencer.py
|
AlienX456/AM-inferenciatorKafka
|
c92e5050e77ac31ec142a17e219fe51b8dbe7da0
|
[
"MIT"
] | 8
|
2021-01-14T13:10:12.000Z
|
2021-07-03T02:36:32.000Z
|
inferencer/inferencer.py
|
AlienX456/AM-inferenciatorKafka
|
c92e5050e77ac31ec142a17e219fe51b8dbe7da0
|
[
"MIT"
] | 5
|
2021-05-23T00:07:30.000Z
|
2021-12-15T22:16:17.000Z
|
from abc import ABC, abstractmethod
class Inferencer(ABC):
    """Abstract base class for inference runners.

    Concrete subclasses must implement :meth:`run_inferencer`.
    """

    @abstractmethod
    def run_inferencer(self, ruta):
        # `ruta` (Spanish for "route/path") is presumably a path or locator
        # for the inference input -- TODO confirm against callers.
        pass
| 14.555556
| 35
| 0.694656
| 15
| 131
| 6
| 0.733333
| 0.377778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.236641
| 131
| 8
| 36
| 16.375
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
74fb2034fea7b673dc564f3275d0fb481a92c601
| 94
|
py
|
Python
|
pyzeroSR1/__init__.py
|
aasensio/pyzeroSR1
|
8e43269284f8e1bec5b17ea162700bcc8df8f337
|
[
"MIT"
] | 2
|
2015-03-12T17:33:14.000Z
|
2016-10-11T02:42:32.000Z
|
pyzeroSR1/__init__.py
|
aasensio/pyzeroSR1
|
8e43269284f8e1bec5b17ea162700bcc8df8f337
|
[
"MIT"
] | null | null | null |
pyzeroSR1/__init__.py
|
aasensio/pyzeroSR1
|
8e43269284f8e1bec5b17ea162700bcc8df8f337
|
[
"MIT"
] | null | null | null |
# Re-export the core zeroSR1 API at package level.
# NOTE(review): the bare ``from zeroSR1 import *`` was a Python 2 implicit
# relative import; under Python 3 it only resolves if a *top-level* module
# named ``zeroSR1`` exists.  The sibling imports below are explicit relative,
# so the submodule form is used here for consistency -- confirm ``zeroSR1``
# is indeed a submodule of this package.
from .zeroSR1 import *
from . import proxes
from . import smoothFunctions

# Package version string.
__version__ = "0.1"
| 15.666667
| 29
| 0.755319
| 12
| 94
| 5.583333
| 0.666667
| 0.298507
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.038462
| 0.170213
| 94
| 5
| 30
| 18.8
| 0.820513
| 0
| 0
| 0
| 0
| 0
| 0.031915
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
2d39c55a7a6e54b35c34bdaca007610e890303ad
| 5,479
|
py
|
Python
|
sponge-integration-tests/examples/core/rules_immediate_duration.py
|
mnpas/sponge
|
7190f23ae888bbef49d0fbb85157444d6ea48bcd
|
[
"Apache-2.0"
] | 9
|
2017-12-16T21:48:57.000Z
|
2022-01-06T12:22:24.000Z
|
sponge-integration-tests/examples/core/rules_immediate_duration.py
|
mnpas/sponge
|
7190f23ae888bbef49d0fbb85157444d6ea48bcd
|
[
"Apache-2.0"
] | 3
|
2020-12-18T11:56:46.000Z
|
2022-03-31T18:37:10.000Z
|
sponge-integration-tests/examples/core/rules_immediate_duration.py
|
mnpas/sponge
|
7190f23ae888bbef49d0fbb85157444d6ea48bcd
|
[
"Apache-2.0"
] | 2
|
2019-12-29T16:08:32.000Z
|
2020-06-15T14:05:34.000Z
|
"""
Sponge Knowledge Base
Using rules - immediate, duration
"""
from org.openksavi.sponge.examples.util import CorrelationEventsLog
from org.openksavi.sponge.core.event import EventId
def onInit():
    """Engine init callback: set up shared globals for the rule definitions.

    ``sponge`` (like ``Rule`` and ``Duration`` used below) is injected into
    this script's namespace by the Sponge engine.
    """
    global defaultDuration, correlationEventsLog
    # Duration (seconds) applied to every rule via withDuration().
    defaultDuration = 2
    # Variables for assertions only
    correlationEventsLog = CorrelationEventsLog()
    # Expose the log so the integration test can read it back.
    sponge.setVariable("correlationEventsLog", correlationEventsLog)
def runRule(rule):
    """Shared onRun body: log the matched event sequence and record it.

    Appends the firing rule's events to the global correlation log under
    the rule's name, for later assertions by the integration test.
    """
    rule.logger.debug("Sequence: {}", SpongeUtils.getAbbreviatedEventSequenceString(rule))
    global correlationEventsLog
    correlationEventsLog.addEvents(rule.meta.name, rule)
# Naming F(irst), L(ast), A(ll), N(one)
# F(irst)F(irst)F(irst)
class RuleFFF(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2", "e3 :first"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)F(irst)L(ast)
class RuleFFL(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2", "e3 :last"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)F(irst)A(ll)
class RuleFFA(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2", "e3 :all"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)F(irst)N(one)
class RuleFFN(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2", "e4 :none"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)L(ast)F(irst)
class RuleFLF(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2 :last", "e3 :first"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)L(ast)L(ast)
class RuleFLL(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2 :last", "e3 :last"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)L(ast)A(ll)
class RuleFLA(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2 :last", "e3 :all"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)L(ast)N(one)
class RuleFLN(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2 :last", "e4 :none"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)A(ll)F(irst)
class RuleFAF(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2 :all", "e3 :first"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)A(ll)L(ast)
class RuleFAL(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2 :all", "e3 :last"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)A(ll)A(ll)
class RuleFAA(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2 :all", "e3 :all"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)A(ll)N(one)
class RuleFAN(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2 :all", "e5 :none"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)N(one)F(irst)
class RuleFNF(Rule):
def onConfigure(self):
self.withEvents(["e1", "e5 :none", "e3"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)N(one)L(ast)
class RuleFNL(Rule):
def onConfigure(self):
self.withEvents(["e1", "e5 :none", "e3 :last"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
# F(irst)N(one)A(ll)
class RuleFNA(Rule):
def onConfigure(self):
self.withEvents(["e1", "e5 :none", "e3 :all"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
class RuleFNFReject(Rule):
def onConfigure(self):
self.withEvents(["e1", "e2 :none", "e3"])
global defaultDuration
self.withDuration(Duration.ofSeconds(defaultDuration))
def onRun(self, event):
runRule(self)
def onStartup():
    """Engine startup callback: send the event sequence the rules correlate on.

    Labels "1".."7" identify the individual events in later assertions.
    """
    sponge.event("e1").set("label", "1").send()
    sponge.event("e2").set("label", "2").send()
    sponge.event("e2").set("label", "3").send()
    sponge.event("e3").set("label", "4").send()
    sponge.event("e2").set("label", "5").send()
    sponge.event("e3").set("label", "6").send()
    sponge.event("e3").set("label", "7").send()
| 31.130682
| 91
| 0.625479
| 618
| 5,479
| 5.545307
| 0.142395
| 0.035016
| 0.084039
| 0.102714
| 0.761891
| 0.761891
| 0.713744
| 0.713744
| 0.6904
| 0.629997
| 0
| 0.01488
| 0.227231
| 5,479
| 175
| 92
| 31.308571
| 0.794521
| 0.077386
| 0
| 0.610687
| 0
| 0
| 0.068987
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.267176
| false
| 0
| 0.015267
| 0
| 0.40458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
742e8f73907c217d1a6446d4888544e4a207e4aa
| 228
|
py
|
Python
|
src/Calculator.py
|
jinhongtan/Calculator
|
4f90e8f273121f132dd7dd562c798c6b4e1937d1
|
[
"MIT"
] | null | null | null |
src/Calculator.py
|
jinhongtan/Calculator
|
4f90e8f273121f132dd7dd562c798c6b4e1937d1
|
[
"MIT"
] | null | null | null |
src/Calculator.py
|
jinhongtan/Calculator
|
4f90e8f273121f132dd7dd562c798c6b4e1937d1
|
[
"MIT"
] | 1
|
2021-07-13T03:26:17.000Z
|
2021-07-13T03:26:17.000Z
|
import math
def add(a, b):
    """Return the sum of a and b."""
    result = a + b
    return result

def minus(a, b):
    """Return a minus b."""
    result = a - b
    return result

def multi(a, b):
    """Return the product of a and b."""
    result = a * b
    return result

def divide(a, b):
    """Return a / b (true division; raises ZeroDivisionError when b == 0)."""
    result = a / b
    return result

def square(a):
    """Return a squared."""
    return a * a

def squareRoot(a):
    """Return the square root of a (math.sqrt raises ValueError if a < 0)."""
    return math.sqrt(a)
| 10.363636
| 23
| 0.561404
| 43
| 228
| 2.976744
| 0.302326
| 0.125
| 0.25
| 0.28125
| 0.40625
| 0.40625
| 0
| 0
| 0
| 0
| 0
| 0
| 0.307018
| 228
| 22
| 23
| 10.363636
| 0.810127
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.461538
| false
| 0
| 0.076923
| 0.461538
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
744ad6b5f7a1e651346e7b64ddad808639ba730b
| 4,766
|
py
|
Python
|
tests/annotators/test_entrez.py
|
NCI-GDC/aliquot-maf-tools
|
6aec9490ab7194ec605bf02c4c8e7c1cfca53973
|
[
"Apache-2.0"
] | 1
|
2020-09-18T17:52:37.000Z
|
2020-09-18T17:52:37.000Z
|
tests/annotators/test_entrez.py
|
NCI-GDC/aliquot-maf-tools
|
6aec9490ab7194ec605bf02c4c8e7c1cfca53973
|
[
"Apache-2.0"
] | null | null | null |
tests/annotators/test_entrez.py
|
NCI-GDC/aliquot-maf-tools
|
6aec9490ab7194ec605bf02c4c8e7c1cfca53973
|
[
"Apache-2.0"
] | 1
|
2020-08-14T08:49:39.000Z
|
2020-08-14T08:49:39.000Z
|
"""
Tests for the ``aliquotmaf.annotators.Cosmic`` class.
"""
from collections import OrderedDict
import pytest
from maflib.column_types import EntrezGeneId, NullableStringColumn, StringColumn
from aliquotmaf.annotators.entrez import MAF_FEATURE, MAF_SYMBOL, Entrez
from aliquotmaf.converters.builder import get_builder
@pytest.fixture
def setup_annotator():
    """Yield a factory that builds Entrez annotators and tears them down.

    Every annotator created through the factory is tracked and shut down
    once the test using the fixture finishes.
    """
    created = []
    def _make_annotator(scheme, entrez_json_file):
        curr = Entrez.setup(scheme, entrez_json_file)
        created.append(curr)  # remember for teardown
        return curr
    yield _make_annotator
    # Teardown: release resources held by each created annotator.
    for record in created:
        record.shutdown()
@pytest.fixture
def test_scheme(get_test_scheme):
    """Build a minimal MAF scheme with just the columns the Entrez annotator touches."""
    coldict = OrderedDict(
        [
            (MAF_SYMBOL, StringColumn),
            (MAF_FEATURE, NullableStringColumn),
            ("Entrez_Gene_Id", EntrezGeneId),
        ]
    )
    return get_test_scheme(coldict)
def test_setup_entrez(test_scheme, setup_annotator, get_test_file):
    """Smoke test: Entrez.setup() via the fixture returns an Entrez instance."""
    json_path = get_test_file("ex_entrez.json")
    annotator = setup_annotator(test_scheme, entrez_json_file=json_path)
    assert isinstance(annotator, Entrez)
def test_entrez_symbol_and_feature(
test_scheme,
setup_annotator,
get_test_file,
get_empty_maf_record,
):
# setup annotator
json_path = get_test_file("ex_entrez.json")
annotator = setup_annotator(test_scheme, entrez_json_file=json_path)
init_maf_record = get_empty_maf_record
init_maf_record[MAF_SYMBOL] = get_builder(
MAF_SYMBOL, test_scheme, value='PRAMEF27', default=''
)
init_maf_record[MAF_FEATURE] = get_builder(
MAF_FEATURE, test_scheme, value='ENST00000436041', default=''
)
# print(test_scheme.column_class('Entrez_Gene_Id').__name__)
maf_record = annotator.annotate(init_maf_record)
assert maf_record['Entrez_Gene_Id'].value == 101929983
def test_entrez_symbol_only(
test_scheme,
setup_annotator,
get_test_file,
get_empty_maf_record,
):
# setup annotator
json_path = get_test_file("ex_entrez.json")
annotator = setup_annotator(test_scheme, entrez_json_file=json_path)
init_maf_record = get_empty_maf_record
init_maf_record[MAF_SYMBOL] = get_builder(
MAF_SYMBOL, test_scheme, value='PRAMEF27', default=''
)
# print(test_scheme.column_class('Entrez_Gene_Id').__name__)
maf_record = annotator.annotate(init_maf_record)
assert maf_record['Entrez_Gene_Id'].value == 101929983
def test_feature_only(
test_scheme,
setup_annotator,
get_test_file,
get_empty_maf_record,
):
# setup annotator
json_path = get_test_file("ex_entrez.json")
annotator = setup_annotator(test_scheme, entrez_json_file=json_path)
init_maf_record = get_empty_maf_record
init_maf_record[MAF_FEATURE] = get_builder(
MAF_FEATURE, test_scheme, value='ENST00000436041', default=''
)
# print(test_scheme.column_class('Entrez_Gene_Id').__name__)
maf_record = annotator.annotate(init_maf_record)
assert maf_record['Entrez_Gene_Id'].value == 101929983
def test_neither_id_present_in_query(
test_scheme,
setup_annotator,
get_test_file,
get_empty_maf_record,
):
# setup annotator
json_path = get_test_file("ex_entrez.json")
annotator = setup_annotator(test_scheme, entrez_json_file=json_path)
init_maf_record = get_empty_maf_record
# print(test_scheme.column_class('Entrez_Gene_Id').__name__)
maf_record = annotator.annotate(init_maf_record)
assert maf_record['Entrez_Gene_Id'].value is None
def test_symbol_not_present_in_database(
test_scheme,
setup_annotator,
get_test_file,
get_empty_maf_record,
):
# setup annotator
json_path = get_test_file("ex_entrez.json")
annotator = setup_annotator(test_scheme, entrez_json_file=json_path)
init_maf_record = get_empty_maf_record
init_maf_record[MAF_SYMBOL] = get_builder(
MAF_SYMBOL, test_scheme, value='NOTAGENE', default=''
)
# print(test_scheme.column_class('Entrez_Gene_Id').__name__)
maf_record = annotator.annotate(init_maf_record)
assert maf_record['Entrez_Gene_Id'].value is None
def test_gencode_id_not_present_in_database(
test_scheme,
setup_annotator,
get_test_file,
get_empty_maf_record,
):
# setup annotator
json_path = get_test_file("ex_entrez.json")
annotator = setup_annotator(test_scheme, entrez_json_file=json_path)
init_maf_record = get_empty_maf_record
init_maf_record[MAF_FEATURE] = get_builder(
MAF_FEATURE, test_scheme, value='ENST99999999999', default=''
)
# print(test_scheme.column_class('Entrez_Gene_Id').__name__)
maf_record = annotator.annotate(init_maf_record)
assert maf_record['Entrez_Gene_Id'].value is None
| 27.390805
| 80
| 0.739194
| 626
| 4,766
| 5.135783
| 0.113419
| 0.117574
| 0.072784
| 0.063453
| 0.761742
| 0.761742
| 0.761742
| 0.750855
| 0.750855
| 0.750855
| 0
| 0.016277
| 0.17499
| 4,766
| 173
| 81
| 27.549133
| 0.801373
| 0.105749
| 0
| 0.654867
| 0
| 0
| 0.062456
| 0
| 0
| 0
| 0
| 0
| 0.061947
| 1
| 0.088496
| false
| 0
| 0.044248
| 0
| 0.150442
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
7459f1b4b4edbc2cf4534a158db6eceeb17d0498
| 82
|
py
|
Python
|
src/oidcop/session/__init__.py
|
cnjlq84/oidc-op
|
c7ab007327cc1a5e69abba7699c0be5f29534049
|
[
"Apache-2.0"
] | 31
|
2020-09-15T21:18:05.000Z
|
2022-02-17T02:50:04.000Z
|
src/oidcop/session/__init__.py
|
cnjlq84/oidc-op
|
c7ab007327cc1a5e69abba7699c0be5f29534049
|
[
"Apache-2.0"
] | 106
|
2021-03-26T17:12:54.000Z
|
2022-03-11T07:19:46.000Z
|
src/oidcop/session/__init__.py
|
cnjlq84/oidc-op
|
c7ab007327cc1a5e69abba7699c0be5f29534049
|
[
"Apache-2.0"
] | 13
|
2020-02-12T16:31:01.000Z
|
2022-03-03T09:54:44.000Z
|
class Revoked(Exception):
    """Raised when a session/grant item has been revoked.

    NOTE(review): semantics inferred from the name only -- no raising code
    is visible in this chunk; confirm against callers.
    """
    pass
class MintingNotAllowed(Exception):
    """Raised when minting (issuing) a new token is not permitted.

    NOTE(review): semantics inferred from the name only -- confirm against
    the session/token code that raises it.
    """
    pass
| 11.714286
| 35
| 0.731707
| 8
| 82
| 7.5
| 0.625
| 0.433333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.195122
| 82
| 6
| 36
| 13.666667
| 0.909091
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
7468fbca63a8c15193434a4b3da2c141d320366f
| 152
|
py
|
Python
|
search.py
|
tolulomo/whyis
|
eb50ab3301eb7efd27a1a3f6fb2305dedd910397
|
[
"Apache-2.0"
] | 31
|
2018-05-30T02:41:23.000Z
|
2021-10-17T01:25:20.000Z
|
search.py
|
tolulomo/whyis
|
eb50ab3301eb7efd27a1a3f6fb2305dedd910397
|
[
"Apache-2.0"
] | 115
|
2018-04-07T00:59:11.000Z
|
2022-03-02T03:06:45.000Z
|
search.py
|
tolulomo/whyis
|
eb50ab3301eb7efd27a1a3f6fb2305dedd910397
|
[
"Apache-2.0"
] | 25
|
2018-04-07T00:49:55.000Z
|
2021-09-28T14:29:18.000Z
|
# Backwards-compatibility shim: the top-level ``search`` module moved to
# ``whyis.search``.  Importing this module warns and re-exports everything.
import warnings as __warnings

# stacklevel=2 attributes the warning to the code importing this module,
# rather than to this shim itself.
__warnings.warn(
    "unqualified module names deprecated, use whyis.search",
    DeprecationWarning,
    stacklevel=2,
)
from whyis.search import *
| 25.333333
| 92
| 0.822368
| 18
| 152
| 6.722222
| 0.722222
| 0.181818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111842
| 152
| 5
| 93
| 30.4
| 0.896296
| 0
| 0
| 0
| 0
| 0
| 0.348684
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
748fdccb579948a534ede0359c3ba0bef3c59fd8
| 211
|
py
|
Python
|
simulator/src/graphing/graphing.py
|
larashores/burridgeknopoffsimulator
|
2201b266f8fde00325dc0321acc6aa3f3e2c66f8
|
[
"MIT"
] | 1
|
2019-05-16T12:50:03.000Z
|
2019-05-16T12:50:03.000Z
|
simulator/src/graphing/graphing.py
|
vinceshores/burridgeknopoffsimulator
|
2201b266f8fde00325dc0321acc6aa3f3e2c66f8
|
[
"MIT"
] | null | null | null |
simulator/src/graphing/graphing.py
|
vinceshores/burridgeknopoffsimulator
|
2201b266f8fde00325dc0321acc6aa3f3e2c66f8
|
[
"MIT"
] | 1
|
2021-03-02T14:44:36.000Z
|
2021-03-02T14:44:36.000Z
|
from graphing.draw import draw
from graphing.graph import Graph
from graphing.histogram import Histogram
from graphing.suplot import SubPlot
from graphing.curve_fit import CurveFit
from graphing.line import Line
| 35.166667
| 40
| 0.862559
| 31
| 211
| 5.83871
| 0.387097
| 0.39779
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109005
| 211
| 6
| 41
| 35.166667
| 0.962766
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
77906598a2f89a73d6b7d642d5592f59870146ce
| 6,401
|
py
|
Python
|
nfv/nfv-vim/nfv_vim/rpc/__init__.py
|
riddopic/nfv
|
e5ced4ade4916910646bcf8018dfabadef447fc2
|
[
"Apache-2.0"
] | 2
|
2020-02-07T19:01:36.000Z
|
2022-02-23T01:41:46.000Z
|
nfv/nfv-vim/nfv_vim/rpc/__init__.py
|
riddopic/nfv
|
e5ced4ade4916910646bcf8018dfabadef447fc2
|
[
"Apache-2.0"
] | 1
|
2021-01-14T12:02:25.000Z
|
2021-01-14T12:02:25.000Z
|
nfv/nfv-vim/nfv_vim/rpc/__init__.py
|
riddopic/nfv
|
e5ced4ade4916910646bcf8018dfabadef447fc2
|
[
"Apache-2.0"
] | 2
|
2021-01-13T08:39:21.000Z
|
2022-02-09T00:21:55.000Z
|
#
# Copyright (c) 2015-2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
from nfv_vim.rpc._rpc_defs import RPC_MSG_RESULT # noqa: F401
from nfv_vim.rpc._rpc_defs import RPC_MSG_TYPE # noqa: F401
from nfv_vim.rpc._rpc_defs import RPC_MSG_VERSION # noqa: F401
from nfv_vim.rpc._rpc_message import RPCMessage # noqa: F401
from nfv_vim.rpc._rpc_message_image import APIRequestCreateImage # noqa: F401
from nfv_vim.rpc._rpc_message_image import APIRequestDeleteImage # noqa: F401
from nfv_vim.rpc._rpc_message_image import APIRequestGetImage # noqa: F401
from nfv_vim.rpc._rpc_message_image import APIRequestUpdateImage # noqa: F401
from nfv_vim.rpc._rpc_message_image import APIResponseCreateImage # noqa: F401
from nfv_vim.rpc._rpc_message_image import APIResponseDeleteImage # noqa: F401
from nfv_vim.rpc._rpc_message_image import APIResponseGetImage # noqa: F401
from nfv_vim.rpc._rpc_message_image import APIResponseUpdateImage # noqa: F401
from nfv_vim.rpc._rpc_message_volume import APIRequestCreateVolume # noqa: F401
from nfv_vim.rpc._rpc_message_volume import APIRequestDeleteVolume # noqa: F401
from nfv_vim.rpc._rpc_message_volume import APIRequestGetVolume # noqa: F401
from nfv_vim.rpc._rpc_message_volume import APIRequestUpdateVolume # noqa: F401
from nfv_vim.rpc._rpc_message_volume import APIResponseCreateVolume # noqa: F401
from nfv_vim.rpc._rpc_message_volume import APIResponseDeleteVolume # noqa: F401
from nfv_vim.rpc._rpc_message_volume import APIResponseGetVolume # noqa: F401
from nfv_vim.rpc._rpc_message_volume import APIResponseUpdateVolume # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestColdMigrateInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestCreateInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestDeleteInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestEvacuateInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestGetInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestLiveMigrateInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestPauseInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestRebootInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestResumeInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestStartInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestStopInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestSuspendInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIRequestUnpauseInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseColdMigrateInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseCreateInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseDeleteInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseEvacuateInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseGetInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseLiveMigrateInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponsePauseInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseRebootInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseResumeInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseStartInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseStopInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseSuspendInstance # noqa: F401
from nfv_vim.rpc._rpc_message_instance import APIResponseUnpauseInstance # noqa: F401
from nfv_vim.rpc._rpc_message_subnet import APIRequestCreateSubnet # noqa: F401
from nfv_vim.rpc._rpc_message_subnet import APIRequestDeleteSubnet # noqa: F401
from nfv_vim.rpc._rpc_message_subnet import APIRequestGetSubnet # noqa: F401
from nfv_vim.rpc._rpc_message_subnet import APIRequestUpdateSubnet # noqa: F401
from nfv_vim.rpc._rpc_message_subnet import APIResponseCreateSubnet # noqa: F401
from nfv_vim.rpc._rpc_message_subnet import APIResponseDeleteSubnet # noqa: F401
from nfv_vim.rpc._rpc_message_subnet import APIResponseGetSubnet # noqa: F401
from nfv_vim.rpc._rpc_message_subnet import APIResponseUpdateSubnet # noqa: F401
from nfv_vim.rpc._rpc_message_network import APIRequestCreateNetwork # noqa: F401
from nfv_vim.rpc._rpc_message_network import APIRequestDeleteNetwork # noqa: F401
from nfv_vim.rpc._rpc_message_network import APIRequestGetNetwork # noqa: F401
from nfv_vim.rpc._rpc_message_network import APIRequestUpdateNetwork # noqa: F401
from nfv_vim.rpc._rpc_message_network import APIResponseCreateNetwork # noqa: F401
from nfv_vim.rpc._rpc_message_network import APIResponseDeleteNetwork # noqa: F401
from nfv_vim.rpc._rpc_message_network import APIResponseGetNetwork # noqa: F401
from nfv_vim.rpc._rpc_message_network import APIResponseUpdateNetwork # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIRequestAbortSwUpdateStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIRequestApplySwUpdateStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIRequestCreateKubeRootcaUpdateStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIRequestCreateKubeUpgradeStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIRequestCreateSwUpdateStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIRequestCreateSwUpgradeStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIRequestDeleteSwUpdateStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIRequestGetSwUpdateStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIResponseAbortSwUpdateStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIResponseApplySwUpdateStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIResponseCreateSwUpdateStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIResponseDeleteSwUpdateStrategy # noqa: F401
from nfv_vim.rpc._rpc_message_sw_update import APIResponseGetSwUpdateStrategy # noqa: F401
| 73.574713
| 101
| 0.853929
| 854
| 6,401
| 6.031616
| 0.124122
| 0.101922
| 0.145603
| 0.189284
| 0.635605
| 0.635605
| 0.635605
| 0.635605
| 0.629586
| 0.623374
| 0
| 0.04065
| 0.09686
| 6,401
| 86
| 102
| 74.430233
| 0.850372
| 0.142009
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
779ea9f6d0df45427a721d863e36fdfccf213777
| 113
|
py
|
Python
|
app/schemas/cashback.py
|
wlsouza/cashbackgb
|
c5cffe782eb0f8c2ec0303405820e49c494d04a3
|
[
"MIT"
] | null | null | null |
app/schemas/cashback.py
|
wlsouza/cashbackgb
|
c5cffe782eb0f8c2ec0303405820e49c494d04a3
|
[
"MIT"
] | null | null | null |
app/schemas/cashback.py
|
wlsouza/cashbackgb
|
c5cffe782eb0f8c2ec0303405820e49c494d04a3
|
[
"MIT"
] | 1
|
2022-02-10T04:15:19.000Z
|
2022-02-10T04:15:19.000Z
|
from pydantic import BaseModel
# Properties to return to client
class CashBack(BaseModel):
    """Response schema: the accumulated cashback value returned to the client."""

    # Cashback amount; pydantic validates/coerces the incoming value to float.
    cashback: float
| 16.142857
| 32
| 0.778761
| 14
| 113
| 6.285714
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176991
| 113
| 6
| 33
| 18.833333
| 0.946237
| 0.265487
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
77a35f295819c62c0efed0f3057d0cd793146d8e
| 573
|
py
|
Python
|
tcex/testing/__init__.py
|
kdeltared/tcex
|
818c0d09256764f871e42d9ca5916f92d941d882
|
[
"Apache-2.0"
] | 18
|
2017-01-09T22:17:49.000Z
|
2022-01-24T20:46:42.000Z
|
tcex/testing/__init__.py
|
kdeltared/tcex
|
818c0d09256764f871e42d9ca5916f92d941d882
|
[
"Apache-2.0"
] | 84
|
2017-04-11T13:47:49.000Z
|
2022-03-21T20:12:57.000Z
|
tcex/testing/__init__.py
|
kdeltared/tcex
|
818c0d09256764f871e42d9ca5916f92d941d882
|
[
"Apache-2.0"
] | 43
|
2017-01-05T20:40:26.000Z
|
2022-03-31T19:18:02.000Z
|
"""Testing module for TcEx Framework"""
# flake8: noqa
# first-party
from tcex.testing.monkeypatch import monkeypatch, register_monkeypatch
from tcex.testing.stage_data import Stager
from tcex.testing.test_case_api_service import TestCaseApiService
from tcex.testing.test_case_job import TestCaseJob
from tcex.testing.test_case_playbook import TestCasePlaybook
from tcex.testing.test_case_trigger_service import TestCaseTriggerService
from tcex.testing.test_case_webhook_trigger_service import TestCaseWebhookTriggerService
from tcex.testing.validate_data import Validator
| 47.75
| 88
| 0.877836
| 75
| 573
| 6.48
| 0.413333
| 0.131687
| 0.246914
| 0.195473
| 0.236626
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001887
| 0.075044
| 573
| 11
| 89
| 52.090909
| 0.915094
| 0.102967
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
77c826f3aebd5eb01aefdbc744c1da093fe5e81b
| 309
|
py
|
Python
|
dumpster/storage.py
|
ritchie46/dumpster
|
d57e4c4ab7dc58edcaf8de3a15e50824e7adc4c7
|
[
"MIT"
] | 2
|
2019-11-11T11:47:14.000Z
|
2019-11-11T11:56:52.000Z
|
dumpster/storage.py
|
ritchie46/dumpster
|
d57e4c4ab7dc58edcaf8de3a15e50824e7adc4c7
|
[
"MIT"
] | 2
|
2019-11-11T12:14:47.000Z
|
2019-11-11T12:15:06.000Z
|
dumpster/storage.py
|
ritchie46/dumpster
|
d57e4c4ab7dc58edcaf8de3a15e50824e7adc4c7
|
[
"MIT"
] | null | null | null |
def read_blob(blob, bucket):
    """Fetch *blob* from *bucket* and return its contents as text.

    Bytes that are not valid UTF-8 are silently dropped rather than raising.
    """
    raw = bucket.blob(blob).download_as_string()
    return raw.decode("utf-8", errors="ignore")
def download_blob(blob, file_obj, bucket):
    """Stream the bytes of *blob* from *bucket* into the open *file_obj*."""
    target = bucket.blob(blob)
    return target.download_to_file(file_obj)
def write_blob(key, file_obj, bucket):
    """Upload the contents of the open *file_obj* to *bucket* under *key*."""
    destination = bucket.blob(key)
    return destination.upload_from_file(file_obj)
| 28.090909
| 82
| 0.754045
| 49
| 309
| 4.489796
| 0.408163
| 0.145455
| 0.245455
| 0.3
| 0.472727
| 0.472727
| 0
| 0
| 0
| 0
| 0
| 0.003636
| 0.110032
| 309
| 10
| 83
| 30.9
| 0.796364
| 0
| 0
| 0
| 0
| 0
| 0.035599
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
77e0f3ba0781cc12758e7f18ff030b904afceaf1
| 380
|
py
|
Python
|
app/errors/handlers.py
|
chinmaya-dev/dashboard-rs
|
0dbc16fe25302edb3c8663d19ddf8a529d1dda99
|
[
"MIT"
] | null | null | null |
app/errors/handlers.py
|
chinmaya-dev/dashboard-rs
|
0dbc16fe25302edb3c8663d19ddf8a529d1dda99
|
[
"MIT"
] | 11
|
2019-12-26T17:20:25.000Z
|
2022-03-21T22:16:55.000Z
|
app/errors/handlers.py
|
chinmaya-dev/ttbdonation
|
1ea4cb2c279db86465040b68f1fa48dbb5f7e17c
|
[
"MIT"
] | null | null | null |
from flask import render_template
from app import db
from app.errors import bp
@bp.app_errorhandler(404)
def error_404(error):
    """Render the custom 404 page, preserving the 404 status code."""
    return render_template('errors/404.html'), 404


@bp.app_errorhandler(403)
def error_403(error):
    """Render the custom 403 page, preserving the 403 status code."""
    return render_template('errors/403.html'), 403


@bp.app_errorhandler(500)
def error_500(error):
    """Render the custom 500 page, preserving the 500 status code."""
    return render_template('errors/500.html'), 500
| 23.75
| 50
| 0.765789
| 59
| 380
| 4.762712
| 0.288136
| 0.199288
| 0.181495
| 0.266904
| 0.330961
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107784
| 0.121053
| 380
| 15
| 51
| 25.333333
| 0.733533
| 0
| 0
| 0
| 0
| 0
| 0.118421
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
7ac450eebf733ab3502d35b1197b3ab9e588c8b6
| 119
|
py
|
Python
|
ganslate/nn/gans/unpaired/__init__.py
|
ibro45/a
|
a90d92eaf041331cd3397f788cb60884cb0e176b
|
[
"BSD-3-Clause"
] | 17
|
2021-09-07T15:23:04.000Z
|
2022-01-28T15:46:54.000Z
|
ganslate/nn/gans/unpaired/__init__.py
|
ibro45/a
|
a90d92eaf041331cd3397f788cb60884cb0e176b
|
[
"BSD-3-Clause"
] | 18
|
2021-09-08T12:31:39.000Z
|
2021-12-13T15:26:01.000Z
|
ganslate/nn/gans/unpaired/__init__.py
|
ibro45/a
|
a90d92eaf041331cd3397f788cb60884cb0e176b
|
[
"BSD-3-Clause"
] | 2
|
2021-11-10T11:23:00.000Z
|
2022-02-10T07:57:20.000Z
|
from .cut import CUT, CUTConfig
from .cyclegan import CycleGAN, CycleGANConfig
from .revgan import RevGAN, RevGANConfig
| 39.666667
| 46
| 0.831933
| 15
| 119
| 6.6
| 0.533333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 119
| 3
| 47
| 39.666667
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
7ae368c41212ccc5abbd1b094d63b8bdd8abd6d6
| 124
|
py
|
Python
|
accounts/tests.py
|
pabulumm/neighbors
|
59f3f3ae727fe52c7897beaf73d157b02cdcb7a3
|
[
"BSD-3-Clause"
] | null | null | null |
accounts/tests.py
|
pabulumm/neighbors
|
59f3f3ae727fe52c7897beaf73d157b02cdcb7a3
|
[
"BSD-3-Clause"
] | null | null | null |
accounts/tests.py
|
pabulumm/neighbors
|
59f3f3ae727fe52c7897beaf73d157b02cdcb7a3
|
[
"BSD-3-Clause"
] | null | null | null |
from django.test import TestCase
from .models import UserProfile
class UserTestCase(TestCase):
    """Test scaffold for the accounts app's user behaviour."""

    def setUp(self):
        # No fixtures are created yet; placeholder awaiting real tests.
        pass
| 12.4
| 32
| 0.774194
| 16
| 124
| 6
| 0.8125
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 124
| 9
| 33
| 13.777778
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.2
| 0.4
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
bb29326a3a114348d558dcdc05a1b6ecd16b56c6
| 52
|
py
|
Python
|
test/verification/__init__.py
|
warmwaver/calla
|
6667bfc51e3ed66eb0ae3491f827b893e4d8aa0b
|
[
"MIT"
] | 7
|
2018-10-11T09:03:09.000Z
|
2022-02-23T01:34:12.000Z
|
test/verification/__init__.py
|
warmwaver/calla
|
6667bfc51e3ed66eb0ae3491f827b893e4d8aa0b
|
[
"MIT"
] | null | null | null |
test/verification/__init__.py
|
warmwaver/calla
|
6667bfc51e3ed66eb0ae3491f827b893e4d8aa0b
|
[
"MIT"
] | 1
|
2021-03-13T11:59:43.000Z
|
2021-03-13T11:59:43.000Z
|
from . import GB
from . import JTG
from . import TB
| 13
| 17
| 0.711538
| 9
| 52
| 4.111111
| 0.555556
| 0.810811
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 52
| 3
| 18
| 17.333333
| 0.925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
bb505949bae5da8042648ba1f147e13d3280af5a
| 48
|
py
|
Python
|
2 test2.py
|
keerthana1502/python_practice
|
8c0499e014826af78f9a88730551ace3fa79686d
|
[
"bzip2-1.0.6"
] | null | null | null |
2 test2.py
|
keerthana1502/python_practice
|
8c0499e014826af78f9a88730551ace3fa79686d
|
[
"bzip2-1.0.6"
] | null | null | null |
2 test2.py
|
keerthana1502/python_practice
|
8c0499e014826af78f9a88730551ace3fa79686d
|
[
"bzip2-1.0.6"
] | null | null | null |
# The integer literal 1 is truthy, so the first branch always runs.
condition = 1
if condition:
    print("1 is ")
else:
    print("???")
| 9.6
| 18
| 0.416667
| 7
| 48
| 2.857143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0.3125
| 48
| 4
| 19
| 12
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
247c7c896a83c0bfec1378a84fb9329c555be3d7
| 119
|
py
|
Python
|
tests.py
|
gorbulus/DS-Boston_Housing_Data
|
9c61ad8ac57b61479ce2b1dd36d37509f93cf224
|
[
"MIT"
] | null | null | null |
tests.py
|
gorbulus/DS-Boston_Housing_Data
|
9c61ad8ac57b61479ce2b1dd36d37509f93cf224
|
[
"MIT"
] | null | null | null |
tests.py
|
gorbulus/DS-Boston_Housing_Data
|
9c61ad8ac57b61479ce2b1dd36d37509f93cf224
|
[
"MIT"
] | null | null | null |
# tests.py
# William Ponton
# 1.28.19
# Unit tests for boston_housing_dataset project
# Import modules
import unittest
| 17
| 47
| 0.781513
| 18
| 119
| 5.055556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049505
| 0.151261
| 119
| 7
| 48
| 17
| 0.851485
| 0.773109
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
24c2416798d0e85f0fb81aaf71542fc00e2504bc
| 132
|
py
|
Python
|
impl/recommender/document/__init__.py
|
dustywind/bachelor-thesis
|
be06aaeb1b4d73f727a19029a3416a9b8043194d
|
[
"MIT"
] | null | null | null |
impl/recommender/document/__init__.py
|
dustywind/bachelor-thesis
|
be06aaeb1b4d73f727a19029a3416a9b8043194d
|
[
"MIT"
] | null | null | null |
impl/recommender/document/__init__.py
|
dustywind/bachelor-thesis
|
be06aaeb1b4d73f727a19029a3416a9b8043194d
|
[
"MIT"
] | null | null | null |
from .document import Document
from .documentmanager import DocumentManager
from .documenttablecreator import DocumentTableCreator
| 26.4
| 54
| 0.878788
| 12
| 132
| 9.666667
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098485
| 132
| 4
| 55
| 33
| 0.97479
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
24db2ab484bb2c07d8700300b997d065b4694628
| 71,342
|
py
|
Python
|
tests/test_SOFAFile.py
|
HaHeho/pysofaconventions
|
ff08f37fb22c15afeac7c936746e706b7cd2c66d
|
[
"BSD-3-Clause"
] | 28
|
2019-05-10T15:49:05.000Z
|
2021-09-23T01:16:48.000Z
|
tests/test_SOFAFile.py
|
HaHeho/pysofaconventions
|
ff08f37fb22c15afeac7c936746e706b7cd2c66d
|
[
"BSD-3-Clause"
] | 5
|
2019-05-10T15:36:58.000Z
|
2021-02-13T09:53:33.000Z
|
tests/test_SOFAFile.py
|
HaHeho/pysofaconventions
|
ff08f37fb22c15afeac7c936746e706b7cd2c66d
|
[
"BSD-3-Clause"
] | 8
|
2019-05-17T23:37:10.000Z
|
2020-08-28T02:26:39.000Z
|
# -*- coding: utf-8 -*-
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# Copyright (c) 2018, Eurecat / UPF
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#
# @file test_SOFAFile.py
# @author Andrés Pérez-López
# @date 29/08/2018
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
import pytest
import os
import tempfile
from netCDF4 import Dataset
import time
from pysofaconventions import *
import sys
import numpy as np
from collections import OrderedDict
def test_isValid():
    """Exercise SOFAFile.isValid() against every required element in turn.

    Starts from an empty NETCDF4 file and incrementally adds the required
    global attributes, dimensions, position variables and Data entries,
    asserting at each step that validation fails with the expected warning,
    until the file finally validates.

    Fix: the duplicated ``rootgrp.APIVersion = '0.1'`` assignment was removed
    (it was written twice in a row; same value, no behavioural change).
    """

    def raiseWarning(warningString):
        # Re-open the file and assert isValid() fails, emitting a
        # SOFAWarning whose last message contains warningString.
        sofafile = SOFAFile(path, 'r')
        with pytest.warns(SOFAWarning) as record:
            assert not sofafile.isValid()
        assert warningString in str(record[-1].message)
        sofafile.close()

    # Missing Global Attributes
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    # NOTE(review): this outer handle is redundant — raiseWarning opens its
    # own — but it is kept to preserve the original open/close sequence.
    sofafile = SOFAFile(path, 'r')
    raiseWarning('Missing required attribute')
    sofafile.close()

    # All required attributes, but Invalid SOFA convention
    rootgrp = Dataset(path, 'a')
    rootgrp.Conventions = 'NOT_SOFA'
    rootgrp.Version = '1.0'
    rootgrp.SOFAConventions = 'AmbisonicsDRIR'
    rootgrp.SOFAConventionsVersion = '0.1'
    rootgrp.APIName = 'pysofaconventions'
    rootgrp.APIVersion = '0.1'
    rootgrp.AuthorContact = 'andres.perez@eurecat.org'
    rootgrp.Organization = 'Eurecat - UPF'
    rootgrp.License = 'WTFPL - Do What the Fuck You Want to Public License'
    rootgrp.DataType = 'FIRE'
    rootgrp.RoomType = 'reverberant'
    rootgrp.DateCreated = time.ctime(time.time())
    rootgrp.DateModified = time.ctime(time.time())
    rootgrp.Title = 'testpysofaconventions'
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    raiseWarning('File convention is not SOFA')
    sofafile.close()

    # Valid convention, but missing dimensions
    rootgrp = Dataset(path, 'a')
    rootgrp.Conventions = 'SOFA'
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    raiseWarning('Dimension not found: M')
    sofafile.close()

    # Add required dimensions, missing Listener Variables
    rootgrp = Dataset(path, 'a')
    rootgrp.createDimension('I', 1)
    rootgrp.createDimension('N', 2)
    rootgrp.createDimension('C', 3)
    rootgrp.createDimension('M', 4)
    rootgrp.createDimension('R', 5)
    rootgrp.createDimension('E', 6)
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    raiseWarning('Missing Variable: ListenerPosition')
    sofafile.close()

    # Missing Source Variables
    rootgrp = Dataset(path, 'a')
    listenerPositionVar = rootgrp.createVariable('ListenerPosition', 'f8', ('I', 'C'))
    listenerPositionVar.Units = 'metre'
    listenerPositionVar.Type = 'cartesian'
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    raiseWarning('Missing Variable: SourcePosition')
    sofafile.close()

    # Missing Receiver Variables
    rootgrp = Dataset(path, 'a')
    sourcePositionVar = rootgrp.createVariable('SourcePosition', 'f8', ('I', 'C'))
    sourcePositionVar.Units = 'metre'
    sourcePositionVar.Type = 'cartesian'
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    raiseWarning('Missing Variable: ReceiverPosition')
    sofafile.close()

    # Missing Emitter Variables
    rootgrp = Dataset(path, 'a')
    receiverPositionVar = rootgrp.createVariable('ReceiverPosition', 'f8', ('R', 'C', 'I'))
    receiverPositionVar.Units = 'metre'
    receiverPositionVar.Type = 'cartesian'
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    raiseWarning('Missing Variable: EmitterPosition')
    sofafile.close()

    # Missing Data
    rootgrp = Dataset(path, 'a')
    emitterPositionVar = rootgrp.createVariable('EmitterPosition', 'f8', ('E', 'C', 'I'))
    emitterPositionVar.Units = 'metre'
    emitterPositionVar.Type = 'cartesian'
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    raiseWarning('Missing Data.IR Variable')
    sofafile.close()

    # Now it should be fine
    rootgrp = Dataset(path, 'a')
    rootgrp.createVariable('Data.IR', 'f8', ('M', 'R', 'E', 'N'))
    sr = rootgrp.createVariable('Data.SamplingRate', 'f8', ('I',))
    sr.Units = 'hertz'
    rootgrp.createVariable('Data.Delay', 'f8', ('I','R', 'E'))
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.isValid()
    sofafile.close()
    os.remove(path)
def test_getFile():
    """getFile() hands back the underlying netCDF Dataset object."""
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    # NOTE(review): rootgrp was closed above; comparing __dict__ of a closed
    # Dataset appears to work with netCDF4 — confirm this stays valid.
    assert sofafile.getFile().__dict__ == rootgrp.__dict__
    sofafile.close()
    os.remove(path)


def test_getFilename():
    """getFilename() echoes the path the SOFA file was opened from."""
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.getFilename() == path
    sofafile.close()
    os.remove(path)
def test_hasGlobalAttribute():
    """hasGlobalAttribute() is False before, and True after, the attribute is set."""
    attributeName = 'bestAttribute'
    # Attribute does not exist
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert not sofafile.hasGlobalAttribute(attributeName)
    sofafile.close()
    # Attribute exists
    rootgrp = Dataset(path, 'a')
    rootgrp.bestAttribute = 'yeah'
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.hasGlobalAttribute(attributeName)
    sofafile.close()
    os.remove(path)


def test_getGlobalAttributesAsDict():
    """Global attributes written to the Dataset come back by name and value.

    NOTE(review): this test exercises the netCDF4 API directly (ncattrs /
    getattr) and never calls SOFAFile.getGlobalAttributesAsDict — confirm
    whether that was intended.
    """
    attrValue1 = 'attrValue1'
    attrValue2 = 'attrValue2'
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.attr1 = attrValue1
    rootgrp.attr2 = attrValue2
    attrNameList = rootgrp.ncattrs()
    # Compare attribute names
    assert attrNameList == ['attr1','attr2']
    # Compare attribute values
    attrValueList = [getattr(rootgrp,attrName) for attrName in attrNameList]
    assert attrValueList == [attrValue1, attrValue2]
    rootgrp.close()
    os.remove(path)
def test_getGlobalAttributeValue():
    """getGlobalAttributeValue() raises SOFAError for a missing attribute and
    returns the stored value once the attribute exists.

    Fix: the final SOFAFile handle is now closed before the temp file is
    deleted (it was leaked; every sibling test closes its handle).
    """
    attrValue = 'A'

    def raiseError(errorString):
        # Open the file and assert the lookup of 'attr1' fails with errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.getGlobalAttributeValue('attr1')
        assert e.match(errorString)
        sofafile.close()

    # Attribute not found
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Attribute not found')
    # Attribute Found
    rootgrp = Dataset(path, 'a')
    rootgrp.attr1 = attrValue
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.getGlobalAttributeValue('attr1') == attrValue
    sofafile.close()
    os.remove(path)
def test_getDimensionsAsDict():
    """getDimensionsAsDict() is empty for a fresh file and maps dimension
    names to Dimension handles once dimensions exist."""
    # Empty dictionary
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    emptyDict = {}
    sofafile = SOFAFile(path, 'r')
    assert sofafile.getDimensionsAsDict() == emptyDict
    sofafile.close()
    # Non-empty dictionary
    rootgrp = Dataset(path, 'a')
    dimA = rootgrp.createDimension('A',1)
    dimB = rootgrp.createDimension('B',2)
    rootgrp.close()
    targetDict = OrderedDict([
        ('A',dimA),
        ('B',dimB)
    ])
    sofafile = SOFAFile(path, 'r')
    # Assert dimension names are equal
    for k1, k2 in zip(targetDict.keys(), sofafile.getDimensionsAsDict().keys()):
        assert k1 == k2
    # Assert dimension instances (value and name) are equal
    for v1, v2 in zip(targetDict.values(), sofafile.getDimensionsAsDict().values()):
        assert v1.name == v2.name
        assert v1.size == v2.size
    sofafile.close()
    os.remove(path)


def test_getDimension():
    """getDimension() raises SOFAError for a missing dimension and returns
    the matching Dimension handle once it exists."""
    def raiseError(errorString):
        # The failing lookup goes through the wrapped ncfile object.
        sofafile = SOFAFile(path, 'r')
        sofaNcFile = sofafile.ncfile
        with pytest.raises(SOFAError) as e:
            sofaNcFile.getDimension('A')
        assert e.match(errorString)
        sofafile.close()
    # Dimension not found
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Dimension not found')
    # Dimension found
    rootgrp = Dataset(path, 'a')
    dimA = rootgrp.createDimension('A', 1)  # handle unused; creation is the point
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.getDimension('A') == sofafile.getDimensionsAsDict()['A']
    sofafile.close()
    os.remove(path)


def test_getDimensionSize():
    """getDimensionSize() raises for a missing dimension and returns its size
    once it exists."""
    variableName = 'A'
    def raiseError(errorString):
        sofafile = SOFAFile(path, 'r')
        sofaNcFile = sofafile.ncfile
        with pytest.raises(SOFAError) as e:
            sofaNcFile.getDimensionSize(variableName)
        assert e.match(errorString)
        sofafile.close()
    # Dimension not found
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Dimension not found')
    # Dimension found
    rootgrp = Dataset(path, 'a')
    dimA = rootgrp.createDimension(variableName, 1)  # handle unused
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.getDimensionSize(variableName) == sofafile.getDimensionsAsDict()[variableName].size
    sofafile.close()
    os.remove(path)
def test_getVariablesAsDict():
    """getVariablesAsDict() preserves the variables' insertion-ordered names."""
    variableName1 = 'CoolVariable1'
    variableName2 = 'CoolVariable2'
    variableValue1 = 1
    variableValue2 = 2
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('I', 1)
    var1 = rootgrp.createVariable(variableName1, 'f8', ('I'))
    var1[:] = variableValue1
    var2 = rootgrp.createVariable(variableName2, 'f8', ('I'))
    var2[:] = variableValue2
    # Snapshot the expected name ordering before closing the dataset.
    variableDict = OrderedDict(rootgrp.variables)
    # print(rootgrp.variables)
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    # Assert variable names are equal
    for k1, k2 in zip(variableDict.keys(), sofafile.getVariablesAsDict().keys()):
        assert k1 == k2
    sofafile.close()
    os.remove(path)


def test_hasVariable():
    """hasVariable() is False before, and True after, the variable is created."""
    variableName = 'CoolVariable'
    # Variable does not exist
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert not sofafile.hasVariable(variableName)
    sofafile.close()
    # Variable exists
    rootgrp = Dataset(path, 'a')
    rootgrp.createVariable(variableName, 'f8', ())
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.hasVariable(variableName)
    sofafile.close()
    os.remove(path)
def test_getVariableShape():
    """getVariableShape() raises for a missing variable and returns the tuple
    of dimension sizes once the variable exists.

    Fix: dropped the unused ``numDimensions`` and ``var`` locals and iterate
    ``dimensions.items()`` instead of ``zip(keys(), values())``.
    """
    variableName = 'CoolVariable'
    dimensions = {'DIM1':1,'DIM2':2,'DIM3':3}

    def raiseError(errorString):
        # Open the file and assert the shape lookup fails with errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.getVariableShape(variableName)
        assert e.match(errorString)
        sofafile.close()

    # No such variable: SOFAError
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Variable not found')
    # Variable exists
    rootgrp = Dataset(path, 'a')
    for name, value in dimensions.items():
        rootgrp.createDimension(name, value)
    rootgrp.createVariable(variableName, 'f8', tuple(dimensions.keys()))
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.getVariableShape(variableName) == tuple(dimensions.values())
    sofafile.close()
    os.remove(path)
def test_getVariableDimensionality():
    """getVariableDimensionality() raises for a missing variable and returns
    the variable's number of dimensions once it exists."""
    variableName = 'CoolVariable'
    dimensions = {'DIM1':1,'DIM2':2}
    numDimensions = len(dimensions)
    def raiseError(errorString):
        # Open the file and assert the call fails with errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.getVariableDimensionality(variableName)
        assert e.match(errorString)
        sofafile.close()
    # No such variable: SOFAError
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Variable not found')
    # Variable exists
    rootgrp = Dataset(path, 'a')
    for name, value in zip(dimensions.keys(), dimensions.values()):
        rootgrp.createDimension(name, value)
    var = rootgrp.createVariable(variableName, 'f8', tuple(dimensions.keys()))  # handle unused
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.getVariableDimensionality(variableName) == numDimensions
    sofafile.close()
    os.remove(path)


def test_getVariableValue():
    """getVariableValue() raises for a missing variable and returns the stored
    array contents once it exists."""
    variableName = 'CoolVariable'
    variableDim = 1
    # Random payload; compared exactly below via np.array_equal.
    variableValue = np.random.rand(variableDim)
    def raiseError(errorString):
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.getVariableValue(variableName)
        assert e.match(errorString)
        sofafile.close()
    # No such variable: SOFAError
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Variable not found')
    # Variable exists
    rootgrp = Dataset(path, 'a')
    rootgrp.createDimension('I', variableDim)
    var = rootgrp.createVariable(variableName, 'f8', ('I'))
    var[:] = variableValue
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert np.array_equal(sofafile.getVariableValue(variableName), variableValue)
    sofafile.close()
    os.remove(path)
def test_getVariableInstance():
    """getVariableInstance() raises for a missing variable and returns the
    netCDF Variable object once it exists."""
    variableName = 'CoolVariable'
    def raiseError(errorString):
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.getVariableInstance(variableName)
        assert e.match(errorString)
        sofafile.close()
    # No such variable: SOFAError
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Variable not found')
    # Variable exists
    rootgrp = Dataset(path, 'a')
    rootgrp.createVariable(variableName, 'f8', ())
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    # NOTE(review): rootgrp is closed here; indexing a closed Dataset appears
    # to work with netCDF4 — confirm this stays valid.
    assert sofafile.getVariableInstance(variableName).__dict__ == rootgrp[variableName].__dict__
    sofafile.close()
    os.remove(path)
def test_getVariableAttributeValue():
    """getVariableAttributeValue() raises for a missing variable, returns None
    for a missing attribute, and returns the stored attribute value otherwise.

    Fix: the None check now uses identity (``is None``) instead of ``== None``
    (PEP 8; behaviourally identical for the None return).
    """
    variableName = 'CoolVariable'
    attributeValue = 'coolAttributeValue'

    def raiseError(errorString):
        # Open the file and assert the attribute lookup fails with errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.getVariableAttributeValue(variableName,'coolAttribute')
        assert e.match(errorString)
        sofafile.close()

    # No such variable: SOFAError
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Variable not found')
    # Variable exists, but not the attribute: None
    rootgrp = Dataset(path, 'a')
    rootgrp.createVariable(variableName, 'f8', ())
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.getVariableAttributeValue(variableName,'coolAttribute') is None
    sofafile.close()
    # Both variable and attribute exist
    rootgrp = Dataset(path, 'a')
    var = rootgrp.variables[variableName]
    var.coolAttribute = attributeValue
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.getVariableAttributeValue(variableName,'coolAttribute') == attributeValue
    sofafile.close()
    os.remove(path)
def test_getPositionVariableInfo():
    """getPositionVariableInfo() raises for a missing variable, yields
    (None, None) when Units/Type attributes are absent, and returns both
    values otherwise."""
    variableName = 'CoolVariable'
    unitsVar = 'myUnits'
    typeVar = 'greatType'
    def raiseError(errorString):
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.getPositionVariableInfo(variableName)
        assert e.match(errorString)
        sofafile.close()
    # No such variable: SOFAError
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Variable not found')
    # Variable exists, but not the attributes: None
    rootgrp = Dataset(path, 'a')
    rootgrp.createVariable(variableName, 'f8', ())
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.getPositionVariableInfo(variableName) == (None, None)
    sofafile.close()
    # Both variable and attributes exist
    rootgrp = Dataset(path, 'a')
    var = rootgrp.variables[variableName]
    var.Units = unitsVar
    var.Type = typeVar
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert sofafile.getPositionVariableInfo(variableName) == (unitsVar, typeVar)
    sofafile.close()
    os.remove(path)
def _assert_boolean_view_accessor(variable_name, accessor_name):
    """Shared driver: the has*() accessor is False on an empty SOFA file and
    becomes True once *variable_name* is created in it.

    Fix over the originals: the SOFAFile handles are now closed (the previous
    inline versions opened SOFAFile(path, 'r') without ever closing it).
    """
    fd, path = tempfile.mkstemp()
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    # Variable absent -> accessor reports False
    sofafile = SOFAFile(path, 'r')
    assert not getattr(sofafile, accessor_name)()
    sofafile.close()
    # Create the variable; accessor now reports True
    rootgrp = Dataset(path, 'a')
    rootgrp.createVariable(variable_name, 'f8', ())
    rootgrp.close()
    sofafile = SOFAFile(path, 'r')
    assert getattr(sofafile, accessor_name)()
    sofafile.close()
    os.remove(path)


def test_hasListenerView():
    """hasListenerView() tracks the presence of 'ListenerView'."""
    _assert_boolean_view_accessor('ListenerView', 'hasListenerView')


def test_hasListenerUp():
    """hasListenerUp() tracks the presence of 'ListenerUp'."""
    _assert_boolean_view_accessor('ListenerUp', 'hasListenerUp')


def test_hasSourceView():
    """hasSourceView() tracks the presence of 'SourceView'."""
    _assert_boolean_view_accessor('SourceView', 'hasSourceView')


def test_hasSourceUp():
    """hasSourceUp() tracks the presence of 'SourceUp'."""
    _assert_boolean_view_accessor('SourceUp', 'hasSourceUp')


def test_hasReceiverView():
    """hasReceiverView() tracks the presence of 'ReceiverView'."""
    _assert_boolean_view_accessor('ReceiverView', 'hasReceiverView')


def test_hasReceiverUp():
    """hasReceiverUp() tracks the presence of 'ReceiverUp'."""
    _assert_boolean_view_accessor('ReceiverUp', 'hasReceiverUp')


def test_hasEmitterView():
    """hasEmitterView() tracks the presence of 'EmitterView'."""
    _assert_boolean_view_accessor('EmitterView', 'hasEmitterView')


def test_hasEmitterUp():
    """hasEmitterUp() tracks the presence of 'EmitterUp'."""
    _assert_boolean_view_accessor('EmitterUp', 'hasEmitterUp')
def test_getListenerPositionInfo():
units = 'Unit'
type = 'Type'
targetTuple = (units, type)
fd, path = tempfile.mkstemp()
rootgrp = Dataset(path, 'w', format='NETCDF4')
var = rootgrp.createVariable('ListenerPosition', 'f8', ())
var.Units = units
var.Type = type
rootgrp.close()
assert SOFAFile(path, 'r').getListenerPositionInfo() == targetTuple
os.remove(path)
def test_getListenerUpInfo():
units = 'Unit'
type = 'Type'
targetTuple = (units, type)
fd, path = tempfile.mkstemp()
rootgrp = Dataset(path, 'w', format='NETCDF4')
var = rootgrp.createVariable('ListenerUp', 'f8', ())
var.Units = units
var.Type = type
rootgrp.close()
assert SOFAFile(path, 'r').getListenerUpInfo() == targetTuple
os.remove(path)
def test_getListenerViewInfo():
units = 'Unit'
type = 'Type'
targetTuple = (units, type)
fd, path = tempfile.mkstemp()
rootgrp = Dataset(path, 'w', format='NETCDF4')
var = rootgrp.createVariable('ListenerView', 'f8', ())
var.Units = units
var.Type = type
rootgrp.close()
assert SOFAFile(path, 'r').getListenerViewInfo() == targetTuple
os.remove(path)
def test_getSourcePositionInfo():
    """getSourcePositionInfo() returns the variable's (Units, Type) pair."""
    units = 'Unit'
    coord_type = 'Type'  # renamed: don't shadow the builtin `type`
    targetTuple = (units, coord_type)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    var = rootgrp.createVariable('SourcePosition', 'f8', ())
    var.Units = units
    var.Type = coord_type
    rootgrp.close()
    assert SOFAFile(path, 'r').getSourcePositionInfo() == targetTuple
    os.remove(path)
def test_getSourceUpInfo():
    """getSourceUpInfo() returns the variable's (Units, Type) pair."""
    units = 'Unit'
    coord_type = 'Type'  # renamed: don't shadow the builtin `type`
    targetTuple = (units, coord_type)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    var = rootgrp.createVariable('SourceUp', 'f8', ())
    var.Units = units
    var.Type = coord_type
    rootgrp.close()
    assert SOFAFile(path, 'r').getSourceUpInfo() == targetTuple
    os.remove(path)
def test_getSourceViewInfo():
    """getSourceViewInfo() returns the variable's (Units, Type) pair."""
    units = 'Unit'
    coord_type = 'Type'  # renamed: don't shadow the builtin `type`
    targetTuple = (units, coord_type)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    var = rootgrp.createVariable('SourceView', 'f8', ())
    var.Units = units
    var.Type = coord_type
    rootgrp.close()
    assert SOFAFile(path, 'r').getSourceViewInfo() == targetTuple
    os.remove(path)
def test_getReceiverPositionInfo():
    """getReceiverPositionInfo() returns the variable's (Units, Type) pair."""
    units = 'Unit'
    coord_type = 'Type'  # renamed: don't shadow the builtin `type`
    targetTuple = (units, coord_type)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    var = rootgrp.createVariable('ReceiverPosition', 'f8', ())
    var.Units = units
    var.Type = coord_type
    rootgrp.close()
    assert SOFAFile(path, 'r').getReceiverPositionInfo() == targetTuple
    os.remove(path)
def test_getReceiverUpInfo():
    """getReceiverUpInfo() returns the variable's (Units, Type) pair."""
    units = 'Unit'
    coord_type = 'Type'  # renamed: don't shadow the builtin `type`
    targetTuple = (units, coord_type)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    var = rootgrp.createVariable('ReceiverUp', 'f8', ())
    var.Units = units
    var.Type = coord_type
    rootgrp.close()
    assert SOFAFile(path, 'r').getReceiverUpInfo() == targetTuple
    os.remove(path)
def test_getReceiverViewInfo():
    """getReceiverViewInfo() returns the variable's (Units, Type) pair."""
    units = 'Unit'
    coord_type = 'Type'  # renamed: don't shadow the builtin `type`
    targetTuple = (units, coord_type)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    var = rootgrp.createVariable('ReceiverView', 'f8', ())
    var.Units = units
    var.Type = coord_type
    rootgrp.close()
    assert SOFAFile(path, 'r').getReceiverViewInfo() == targetTuple
    os.remove(path)
def test_getEmitterPositionInfo():
    """getEmitterPositionInfo() returns the variable's (Units, Type) pair."""
    units = 'Unit'
    coord_type = 'Type'  # renamed: don't shadow the builtin `type`
    targetTuple = (units, coord_type)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    var = rootgrp.createVariable('EmitterPosition', 'f8', ())
    var.Units = units
    var.Type = coord_type
    rootgrp.close()
    assert SOFAFile(path, 'r').getEmitterPositionInfo() == targetTuple
    os.remove(path)
def test_getEmitterUpInfo():
    """getEmitterUpInfo() returns the variable's (Units, Type) pair."""
    units = 'Unit'
    coord_type = 'Type'  # renamed: don't shadow the builtin `type`
    targetTuple = (units, coord_type)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    var = rootgrp.createVariable('EmitterUp', 'f8', ())
    var.Units = units
    var.Type = coord_type
    rootgrp.close()
    assert SOFAFile(path, 'r').getEmitterUpInfo() == targetTuple
    os.remove(path)
def test_getEmitterViewInfo():
    """getEmitterViewInfo() returns the variable's (Units, Type) pair."""
    units = 'Unit'
    coord_type = 'Type'  # renamed: don't shadow the builtin `type`
    targetTuple = (units, coord_type)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    var = rootgrp.createVariable('EmitterView', 'f8', ())
    var.Units = units
    var.Type = coord_type
    rootgrp.close()
    assert SOFAFile(path, 'r').getEmitterViewInfo() == targetTuple
    os.remove(path)
def test_getListenerPositionValues():
    """getListenerPositionValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('ListenerPosition', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getListenerPositionValues(), targetArray)
    os.remove(path)
def test_getListenerUpValues():
    """getListenerUpValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('ListenerUp', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getListenerUpValues(), targetArray)
    os.remove(path)
def test_getListenerViewValues():
    """getListenerViewValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('ListenerView', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getListenerViewValues(), targetArray)
    os.remove(path)
def test_getSourcePositionValues():
    """getSourcePositionValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('SourcePosition', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getSourcePositionValues(), targetArray)
    os.remove(path)
def test_getSourceUpValues():
    """getSourceUpValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('SourceUp', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getSourceUpValues(), targetArray)
    os.remove(path)
def test_getSourceViewValues():
    """getSourceViewValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('SourceView', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getSourceViewValues(), targetArray)
    os.remove(path)
def test_getReceiverPositionValues():
    """getReceiverPositionValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('ReceiverPosition', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getReceiverPositionValues(), targetArray)
    os.remove(path)
def test_getReceiverUpValues():
    """getReceiverUpValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('ReceiverUp', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getReceiverUpValues(), targetArray)
    os.remove(path)
def test_getReceiverViewValues():
    """getReceiverViewValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('ReceiverView', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getReceiverViewValues(), targetArray)
    os.remove(path)
def test_getEmitterPositionValues():
    """getEmitterPositionValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('EmitterPosition', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getEmitterPositionValues(), targetArray)
    os.remove(path)
def test_getEmitterUpValues():
    """getEmitterUpValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('EmitterUp', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getEmitterUpValues(), targetArray)
    os.remove(path)
def test_getEmitterViewValues():
    """getEmitterViewValues() returns the stored array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    var = rootgrp.createVariable('EmitterView', 'f8', ('DIM1', 'DIM2'))
    var[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getEmitterViewValues(), targetArray)
    os.remove(path)
def test_getDataIR():
    """getDataIR() returns the stored 'Data.IR' array unchanged."""
    dim1 = 2
    dim2 = 5
    targetArray = np.random.rand(dim1, dim2)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    rootgrp.createDimension('DIM2', dim2)
    ir = rootgrp.createVariable('Data.IR', 'f8', ('DIM1', 'DIM2'))
    ir[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getDataIR(), targetArray)
    os.remove(path)
def test_getDataDelay():
    """getDataDelay() returns the stored 'Data.Delay' array unchanged."""
    dim1 = 2
    targetArray = np.random.rand(dim1)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    delay = rootgrp.createVariable('Data.Delay', 'f8', ('DIM1'))
    delay[:] = targetArray
    rootgrp.close()
    assert np.array_equal(SOFAFile(path, 'r').getDataDelay(), targetArray)
    os.remove(path)
def test_getSamplingRate():
    """getSamplingRate() returns the stored 'Data.SamplingRate' array unchanged.

    Bug fix: the original never called rootgrp.close() before reopening the
    file through SOFAFile, so the written values could be unflushed and the
    file left open for writing.
    """
    dim1 = 2
    targetArray = np.random.rand(dim1)
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    sr = rootgrp.createVariable('Data.SamplingRate', 'f8', ('DIM1'))
    sr[:] = targetArray
    rootgrp.close()  # was missing: flush data before reading the file back
    assert np.array_equal(SOFAFile(path, 'r').getSamplingRate(), targetArray)
    os.remove(path)
def test_getSamplingRateUnits():
    """getSamplingRateUnits() returns the 'Units' attribute of Data.SamplingRate."""
    dim1 = 2
    targetString = 'Kelvin'
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    sr = rootgrp.createVariable('Data.SamplingRate', 'f8', ('DIM1'))
    sr.Units = targetString
    rootgrp.close()
    assert SOFAFile(path, 'r').getSamplingRateUnits() == targetString
    os.remove(path)
def test_getDataIRChannelOrdering():
    """getDataIRChannelOrdering() returns the 'ChannelOrdering' attribute of Data.IR."""
    dim1 = 2
    targetString = 'incredibleChannelOrdering'
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    ir = rootgrp.createVariable('Data.IR', 'f8', ('DIM1'))
    ir.ChannelOrdering = targetString
    rootgrp.close()
    assert SOFAFile(path, 'r').getDataIRChannelOrdering() == targetString
    os.remove(path)
def test_getDataIRNormalization():
    """getDataIRNormalization() returns the 'Normalization' attribute of Data.IR."""
    dim1 = 2
    targetString = 'amazingNormalization'
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('DIM1', dim1)
    ir = rootgrp.createVariable('Data.IR', 'f8', ('DIM1'))
    ir.Normalization = targetString
    rootgrp.close()
    assert SOFAFile(path, 'r').getDataIRNormalization() == targetString
    os.remove(path)
# Custom class for print asserts
class MyOutput(object):
    """File-like sink that records every write; str() yields the concatenation.

    Used to temporarily replace sys.stdout so tests can assert on printed text.
    """

    def __init__(self):
        # Each call to write() appends one chunk here, in order.
        self.data = []

    def write(self, s):
        # Capture instead of emitting.
        self.data.append(s)

    def __str__(self):
        # Reassemble exactly what would have been printed.
        return "".join(self.data)
def test_printSOFAGlobalAttributes():
    """printSOFAGlobalAttributes() prints each global attribute as '- NAME\\n\\tVALUE'."""
    # Replace sys.stdout by our sink to compare the printed strings
    stdout_org = sys.stdout
    my_stdout = MyOutput()
    # Create custom file with attributes
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.A = 'a'
    rootgrp.B = 'b'
    rootgrp.close()
    # This should be the output
    targetString = "- A\n\ta\n- B\n\tb\n"
    # Produce some output; always restore stdout, even on failure
    try:
        sys.stdout = my_stdout
        SOFAFile(path, 'r').printSOFAGlobalAttributes()
    finally:
        sys.stdout = stdout_org
    os.remove(path)  # moved out of the redirected section
    # Assert
    assert str(my_stdout) == targetString
def test_printSOFADimensions():
    """printSOFADimensions() prints each dimension as '- NAME : SIZE'."""
    # Replace sys.stdout by our sink to compare the printed strings
    stdout_org = sys.stdout
    my_stdout = MyOutput()
    # Create custom file with dimensions
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('A', 1)
    rootgrp.createDimension('B', 2)
    rootgrp.close()
    # This should be the output
    targetString = "- A : 1\n- B : 2\n"
    # Produce some output; always restore stdout, even on failure
    try:
        sys.stdout = my_stdout
        SOFAFile(path, 'r').printSOFADimensions()
    finally:
        sys.stdout = stdout_org
    os.remove(path)  # moved out of the redirected section
    # Assert
    assert str(my_stdout) == targetString
def test_printSOFAVariables():
    """printSOFAVariables() prints each variable as '- NAME = SHAPE'."""
    # Replace sys.stdout by our sink to compare the printed strings
    stdout_org = sys.stdout
    my_stdout = MyOutput()
    # Create custom file with variables
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('A', 1)
    rootgrp.createDimension('B', 2)
    rootgrp.createVariable('CoolVariable', 'f8', ('A'))
    rootgrp.createVariable('UltraVariable', 'f8', ('A', 'B'))
    rootgrp.close()
    # This should be the output
    targetString = "- CoolVariable = (1,)\n- UltraVariable = (1, 2)\n"
    # Produce some output; always restore stdout, even on failure
    try:
        sys.stdout = my_stdout
        SOFAFile(path, 'r').printSOFAVariables()
    finally:
        sys.stdout = stdout_org
    os.remove(path)  # moved out of the redirected section
    # Assert
    assert str(my_stdout) == targetString
def test_getSOFADimensionStrings():
    """getSOFADimensionStrings() expands known dimension symbols, echoes unknown ones."""
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('M', 1)
    rootgrp.createDimension('A', 2)
    rootgrp.close()
    # Explanatory string for known variables
    targetString = "Number of measurements (M)"
    assert SOFAFile(path, 'r').getSOFADimensionStrings('M') == targetString
    # Return symbol for unknown variables
    targetString = "A"
    assert SOFAFile(path, 'r').getSOFADimensionStrings('A') == targetString
    os.remove(path)
def test_getConventionVersion():
    """The base SOFAFile class reports the placeholder version 'None.None'."""
    expected = "None.None"
    assert SOFAFile.getConventionVersion() == expected
def test_checkSOFARequiredAttributes():
    """checkSOFARequiredAttributes() fails on missing attributes, passes when all set."""
    def raiseError(errorString):
        # Expect checkSOFARequiredAttributes() to raise SOFAError matching errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.checkSOFARequiredAttributes()
        assert e.match(errorString)
        sofafile.close()

    # SOFA file with missing attributes
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Missing required attribute: APIName')
    # Assert all attributes
    rootgrp = Dataset(path, 'a')
    rootgrp.Conventions = 'SOFA'
    rootgrp.Version = '1.0'
    rootgrp.SOFAConventions = 'AmbisonicsDRIR'
    rootgrp.SOFAConventionsVersion = '0.1'
    rootgrp.APIName = 'pysofaconventions'
    rootgrp.APIVersion = '0.1'  # duplicate assignment removed
    rootgrp.AuthorContact = 'andres.perez@eurecat.org'
    rootgrp.Organization = 'Eurecat - UPF'
    rootgrp.License = 'WTFPL - Do What the Fuck You Want to Public License'
    rootgrp.DataType = 'FIRE'
    rootgrp.RoomType = 'reverberant'
    rootgrp.DateCreated = time.ctime(time.time())
    rootgrp.DateModified = time.ctime(time.time())
    rootgrp.Title = 'testpysofaconventions'
    rootgrp.close()
    assert SOFAFile(path, 'r').checkSOFARequiredAttributes()
    os.remove(path)
def test_checkSOFAConvention():
    """checkSOFAConvention() rejects non-'SOFA' Conventions strings and accepts 'SOFA'."""
    def raiseError(errorString):
        # Expect checkSOFAConvention() to raise SOFAError matching errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.checkSOFAConvention()
        assert e.match(errorString)
        sofafile.close()

    # Incorrect Conventions string
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.Conventions = 'Incorrect_string'
    rootgrp.close()
    raiseError('File convention is not SOFA')
    # Correct string
    rootgrp = Dataset(path, 'a')
    rootgrp.Conventions = 'SOFA'
    rootgrp.close()
    assert SOFAFile(path, 'r').checkSOFAConvention()
    os.remove(path)
def test_checkSOFADimensionsAreValid():
    """checkSOFADimensionsAreValid() requires M,N,R,E,I,C with valid sizes.

    M, N, R, E must be >= 1; I must be exactly 1; C must be exactly 3.
    The repetitive file-building of the original is factored into a local
    helper that rewrites the file from scratch with a given dimension set.
    """
    def raiseError(errorString):
        # Expect checkSOFADimensionsAreValid() to raise SOFAError matching errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.checkSOFADimensionsAreValid()
        assert e.match(errorString)
        sofafile.close()

    def writeDimensions(dims):
        # Recreate the file with exactly the given (name, size) dimensions.
        rootgrp = Dataset(path, 'w', format='NETCDF4')
        for name, size in dims:
            rootgrp.createDimension(name, size)
        rootgrp.close()

    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path

    ## MISSING DIMENSIONS: each required dimension is reported in turn
    required = ['M', 'N', 'R', 'E', 'I', 'C']
    present = []
    for missing in required:
        writeDimensions([(name, 1) for name in present])
        raiseError('Dimension not found: %s' % missing)
        present.append(missing)

    ## INCORRECT VALUES FOR DIMENSIONS
    valid = [('M', 1), ('N', 1), ('R', 1), ('E', 1), ('I', 1), ('C', 3)]
    for bad_name, bad_size in [('M', 0), ('N', 0), ('R', 0), ('E', 0),
                               ('I', 2), ('C', 4)]:
        writeDimensions([(name, bad_size if name == bad_name else size)
                         for name, size in valid])
        raiseError('Incorrect dimension size for %s' % bad_name)

    # All fine
    writeDimensions(valid)
    assert SOFAFile(path, 'r').checkSOFADimensionsAreValid()
    os.remove(path)
def test_checkListenerVariables():
    """checkListenerVariables() enforces ListenerPosition/Up/View requirements."""
    def raiseError(errorString):
        # Expect checkListenerVariables() to raise SOFAError matching errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.checkListenerVariables()
        assert e.match(errorString)
        sofafile.close()

    # Missing ListenerPosition Variable
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Missing Variable: ListenerPosition')
    # Missing ListenerPosition.Units
    rootgrp = Dataset(path, 'a')
    rootgrp.createDimension('I', 1)
    rootgrp.createDimension('C', 3)
    rootgrp.createDimension('M', 2)
    rootgrp.createVariable('ListenerPosition', 'f8', ('I', 'C'))
    rootgrp.close()
    raiseError('Missing Variable Attribute: ListenerPosition.Units')
    # Missing ListenerPosition.Coordinates
    rootgrp = Dataset(path, 'a')
    listenerPositionVar = rootgrp.variables['ListenerPosition']
    listenerPositionVar.Units = 'metre'
    rootgrp.close()
    raiseError('Missing Variable Attribute: ListenerPosition.Coordinates')
    # Add ListenerPosition.Coordinates, now it should be fine
    rootgrp = Dataset(path, 'a')
    listenerPositionVar = rootgrp.variables['ListenerPosition']
    listenerPositionVar.Type = 'cartesian'
    rootgrp.close()
    assert SOFAFile(path, 'r').checkListenerVariables()
    os.remove(path)
    # Add ListenerUp; Units and coordinates are not mandatory
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('I', 1)
    rootgrp.createDimension('C', 3)
    rootgrp.createDimension('M', 1)
    listenerPositionVar = rootgrp.createVariable('ListenerPosition', 'f8', ('I', 'C'))
    listenerPositionVar.Units = 'metre'
    listenerPositionVar.Type = 'cartesian'
    rootgrp.createVariable('ListenerUp', 'f8', ('I', 'C'))
    rootgrp.close()
    # v0.1.4: ListenerUp views and units are not mandatory!
    raiseError('ListenerUp exists but not ListenerView')
    # Add ListenerView
    # If "SingleRoomDRIR", units is not mandatory, but coordinates are
    rootgrp = Dataset(path, 'a')
    rootgrp.SOFAConventions = 'SingleRoomDRIR'
    rootgrp.createVariable('ListenerView', 'f8', ('I', 'C'))
    rootgrp.close()
    raiseError('Missing Variable Attribute: ListenerView.Coordinates')
    # Add ListenerView coordinates, now it's all right
    rootgrp = Dataset(path, 'a')
    listenerViewVar = rootgrp.variables['ListenerView']
    listenerViewVar.Type = 'cartesian'
    rootgrp.close()
    # If not "SingleRoomDRIR", units _is_ mandatory
    rootgrp = Dataset(path, 'a')
    rootgrp.SOFAConventions = 'SimpleFreeFieldHRIR'
    rootgrp.close()
    raiseError('Missing Variable Attribute: ListenerView.Units')
    # All right
    rootgrp = Dataset(path, 'a')
    listenerViewVar = rootgrp.variables['ListenerView']
    listenerViewVar.Units = 'metre'
    rootgrp.close()
    assert SOFAFile(path, 'r').checkListenerVariables()
    os.remove(path)
def test_checkSourceVariables():
    """checkSourceVariables() enforces SourcePosition/Up/View requirements."""
    def raiseError(errorString):
        # Expect checkSourceVariables() to raise SOFAError matching errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.checkSourceVariables()
        assert e.match(errorString)
        sofafile.close()

    # Missing SourcePosition Variable
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Missing Variable: SourcePosition')
    # Missing SourcePosition.Units
    rootgrp = Dataset(path, 'a')
    rootgrp.createDimension('I', 1)
    rootgrp.createDimension('C', 3)
    rootgrp.createDimension('M', 2)
    rootgrp.createVariable('SourcePosition', 'f8', ('I', 'C'))
    rootgrp.close()
    raiseError('Missing Variable Attribute: SourcePosition.Units')
    # Missing SourcePosition.Coordinates
    rootgrp = Dataset(path, 'a')
    sourcePositionVar = rootgrp.variables['SourcePosition']
    sourcePositionVar.Units = 'metre'
    rootgrp.close()
    raiseError('Missing Variable Attribute: SourcePosition.Coordinates')
    # Add SourcePosition.Coordinates, now it should be fine
    rootgrp = Dataset(path, 'a')
    sourcePositionVar = rootgrp.variables['SourcePosition']
    sourcePositionVar.Type = 'cartesian'
    rootgrp.close()
    assert SOFAFile(path, 'r').checkSourceVariables()
    os.remove(path)
    # Add SourceUp, missing Units
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('I', 1)
    rootgrp.createDimension('C', 3)
    rootgrp.createDimension('M', 1)
    sourcePositionVar = rootgrp.createVariable('SourcePosition', 'f8', ('I', 'C'))
    sourcePositionVar.Units = 'metre'
    sourcePositionVar.Type = 'cartesian'
    rootgrp.createVariable('SourceUp', 'f8', ('I', 'C'))
    rootgrp.close()
    raiseError('SourceUp exists but not SourceView')
    # Add SourceView
    # If "SingleRoomDRIR", units is not mandatory, but coordinates are
    rootgrp = Dataset(path, 'a')
    rootgrp.SOFAConventions = 'SingleRoomDRIR'
    rootgrp.createVariable('SourceView', 'f8', ('I', 'C'))
    rootgrp.close()
    raiseError('Missing Variable Attribute: SourceView.Coordinates')
    # Add SourceView coordinates, now it's all right
    rootgrp = Dataset(path, 'a')
    sourceViewVar = rootgrp.variables['SourceView']
    sourceViewVar.Type = 'cartesian'
    rootgrp.close()
    # If not "SingleRoomDRIR", units _is_ mandatory
    rootgrp = Dataset(path, 'a')
    rootgrp.SOFAConventions = 'SimpleFreeFieldHRIR'
    rootgrp.close()
    raiseError('Missing Variable Attribute: SourceView.Units')
    # All right
    rootgrp = Dataset(path, 'a')
    sourceViewVar = rootgrp.variables['SourceView']
    sourceViewVar.Units = 'metre'
    rootgrp.close()
    assert SOFAFile(path, 'r').checkSourceVariables()
    os.remove(path)
def test_checkReceiverVariables():
    """checkReceiverVariables() enforces ReceiverPosition/Up/View requirements."""
    def raiseError(errorString):
        # Expect checkReceiverVariables() to raise SOFAError matching errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.checkReceiverVariables()
        assert e.match(errorString)
        sofafile.close()

    # Missing ReceiverPosition Variable
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Missing Variable: ReceiverPosition')
    # Missing ReceiverPosition.Units
    rootgrp = Dataset(path, 'a')
    rootgrp.createDimension('R', 1)
    rootgrp.createDimension('C', 3)
    rootgrp.createDimension('I', 1)
    rootgrp.createDimension('M', 2)
    rootgrp.createVariable('ReceiverPosition', 'f8', ('R', 'C', 'I'))
    rootgrp.close()
    raiseError('Missing Variable Attribute: ReceiverPosition.Units')
    # Missing ReceiverPosition.Coordinates
    rootgrp = Dataset(path, 'a')
    receiverPositionVar = rootgrp.variables['ReceiverPosition']
    receiverPositionVar.Units = 'metre'
    rootgrp.close()
    raiseError('Missing Variable Attribute: ReceiverPosition.Coordinates')
    # Add ReceiverPosition.Coordinates, now it should be fine
    rootgrp = Dataset(path, 'a')
    receiverPositionVar = rootgrp.variables['ReceiverPosition']
    receiverPositionVar.Type = 'cartesian'
    rootgrp.close()
    assert SOFAFile(path, 'r').checkReceiverVariables()
    os.remove(path)
    # Add ReceiverUp, missing Units
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('R', 1)
    rootgrp.createDimension('C', 3)
    rootgrp.createDimension('I', 1)
    rootgrp.createDimension('M', 1)
    receiverPositionVar = rootgrp.createVariable('ReceiverPosition', 'f8', ('R', 'C', 'I'))
    receiverPositionVar.Units = 'metre'
    receiverPositionVar.Type = 'cartesian'
    rootgrp.createVariable('ReceiverUp', 'f8', ('R', 'C', 'I'))
    rootgrp.close()
    raiseError('Missing Variable Attribute: ReceiverUp.Units')
    # Missing ReceiverUp.Coordinates
    rootgrp = Dataset(path, 'a')
    receiverUpVar = rootgrp.variables['ReceiverUp']
    receiverUpVar.Units = 'metre'
    rootgrp.close()
    raiseError('Missing Variable Attribute: ReceiverUp.Coordinates')
    # Add ReceiverView, missing Units
    rootgrp = Dataset(path, 'a')
    receiverUpVar = rootgrp.variables['ReceiverUp']
    receiverUpVar.Type = 'cartesian'
    rootgrp.createVariable('ReceiverView', 'f8', ('R', 'C', 'I'))
    rootgrp.close()
    raiseError('Missing Variable Attribute: ReceiverView.Units')
    # Missing ReceiverView.Coordinates
    rootgrp = Dataset(path, 'a')
    receiverViewVar = rootgrp.variables['ReceiverView']
    receiverViewVar.Units = 'metre'
    rootgrp.close()
    raiseError('Missing Variable Attribute: ReceiverView.Coordinates')
    # Add ReceiverView.Coordinates, now it's all fine
    rootgrp = Dataset(path, 'a')
    receiverViewVar = rootgrp.variables['ReceiverView']
    receiverViewVar.Type = 'cartesian'
    rootgrp.close()
    assert SOFAFile(path, 'r').checkReceiverVariables()
    os.remove(path)
def test_checkEmitterVariables():
    """checkEmitterVariables() enforces EmitterPosition/Up/View requirements."""
    def raiseError(errorString):
        # Expect checkEmitterVariables() to raise SOFAError matching errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.checkEmitterVariables()
        assert e.match(errorString)
        sofafile.close()

    # Missing EmitterPosition Variable
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('Missing Variable: EmitterPosition')
    # Missing EmitterPosition.Units
    rootgrp = Dataset(path, 'a')
    rootgrp.createDimension('E', 1)
    rootgrp.createDimension('C', 3)
    rootgrp.createDimension('I', 1)
    rootgrp.createDimension('M', 2)
    rootgrp.createVariable('EmitterPosition', 'f8', ('E', 'C', 'I'))
    rootgrp.close()
    raiseError('Missing Variable Attribute: EmitterPosition.Units')
    # Missing EmitterPosition.Coordinates
    rootgrp = Dataset(path, 'a')
    emitterPositionVar = rootgrp.variables['EmitterPosition']
    emitterPositionVar.Units = 'metre'
    rootgrp.close()
    raiseError('Missing Variable Attribute: EmitterPosition.Coordinates')
    # Add EmitterPosition.Coordinates, now it should be fine
    rootgrp = Dataset(path, 'a')
    emitterPositionVar = rootgrp.variables['EmitterPosition']
    emitterPositionVar.Type = 'cartesian'
    rootgrp.close()
    assert SOFAFile(path, 'r').checkEmitterVariables()
    os.remove(path)
    # Add EmitterUp, missing Units
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.createDimension('E', 1)
    rootgrp.createDimension('C', 3)
    rootgrp.createDimension('I', 1)
    rootgrp.createDimension('M', 1)
    emitterPositionVar = rootgrp.createVariable('EmitterPosition', 'f8', ('E', 'C', 'I'))
    emitterPositionVar.Units = 'metre'
    emitterPositionVar.Type = 'cartesian'
    rootgrp.createVariable('EmitterUp', 'f8', ('E', 'C', 'I'))
    rootgrp.close()
    raiseError('Missing Variable Attribute: EmitterUp.Units')
    # Missing EmitterUp.Coordinates
    rootgrp = Dataset(path, 'a')
    emitterUpVar = rootgrp.variables['EmitterUp']
    emitterUpVar.Units = 'metre'
    rootgrp.close()
    raiseError('Missing Variable Attribute: EmitterUp.Coordinates')
    # Add EmitterView, missing Units
    rootgrp = Dataset(path, 'a')
    emitterUpVar = rootgrp.variables['EmitterUp']
    emitterUpVar.Type = 'cartesian'
    rootgrp.createVariable('EmitterView', 'f8', ('E', 'C', 'I'))
    rootgrp.close()
    raiseError('Missing Variable Attribute: EmitterView.Units')
    # Missing EmitterView.Coordinates
    rootgrp = Dataset(path, 'a')
    emitterViewVar = rootgrp.variables['EmitterView']
    emitterViewVar.Units = 'metre'
    rootgrp.close()
    raiseError('Missing Variable Attribute: EmitterView.Coordinates')
    # Add EmitterView.Coordinates, now it's all fine
    rootgrp = Dataset(path, 'a')
    emitterViewVar = rootgrp.variables['EmitterView']
    emitterViewVar.Type = 'cartesian'
    rootgrp.close()
    assert SOFAFile(path, 'r').checkEmitterVariables()
    os.remove(path)
def test_checkDataVariable():
    """checkDataVariable() requires a known DataType and a matching data layout."""
    def raiseError(errorString):
        # Expect checkDataVariable() to raise SOFAError matching errorString.
        sofafile = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as e:
            sofafile.checkDataVariable()
        assert e.match(errorString)
        sofafile.close()

    # Missing GLOBAL.DataType attribute
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.close()
    raiseError('No DataType attribute')
    # DataType not known
    rootgrp = Dataset(path, 'a')
    rootgrp.DataType = 'FAKE'
    rootgrp.close()
    raiseError('DataType not known')
    # Assert correct Data
    rootgrp = Dataset(path, 'a')
    rootgrp.DataType = 'FIR'
    rootgrp.createDimension('M', 1)
    rootgrp.createDimension('N', 1)
    rootgrp.createDimension('I', 1)
    rootgrp.createDimension('R', 1)
    rootgrp.createVariable('Data.IR', 'f8', ('M', 'R', 'I'))
    sr = rootgrp.createVariable('Data.SamplingRate', 'f8', ('I',))
    sr.Units = 'hertz'
    rootgrp.createVariable('Data.Delay', 'f8', ('I', 'R'))
    rootgrp.close()
    assert SOFAFile(path, 'r').checkDataVariable()
    os.remove(path)
def test_isFIRDataType():
    """isFIRDataType() is True when the global DataType attribute is 'FIR'."""
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.DataType = 'FIR'
    rootgrp.close()
    assert SOFAFile(path, 'r').isFIRDataType()
    os.remove(path)
def test_isFIREDataType():
    """isFIREDataType() is True when the global DataType attribute is 'FIRE'."""
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.DataType = 'FIRE'
    rootgrp.close()
    assert SOFAFile(path, 'r').isFIREDataType()
    os.remove(path)
def test_isSOSDataType():
    """isSOSDataType() is True when the global DataType attribute is 'SOS'."""
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.DataType = 'SOS'
    rootgrp.close()
    assert SOFAFile(path, 'r').isSOSDataType()
    os.remove(path)
def test_isTFDataType():
    """isTFDataType() is True when the global DataType attribute is 'TF'."""
    fd, path = tempfile.mkstemp()
    os.close(fd)  # avoid leaking mkstemp's descriptor; netCDF4 reopens by path
    rootgrp = Dataset(path, 'w', format='NETCDF4')
    rootgrp.DataType = 'TF'
    rootgrp.close()
    assert SOFAFile(path, 'r').isTFDataType()
    os.remove(path)
def test_checkFIRDataType():
    """checkFIRDataType() must validate the FIR data layout step by step.

    Each required dimension, variable and attribute is added one at a time;
    after every incomplete state the check must raise SOFAError with a
    matching message, and a complete layout must pass.
    """

    def expect_failure(pattern):
        # checkFIRDataType() on the current file must raise SOFAError
        # whose message matches `pattern`.
        sofa = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as excinfo:
            sofa.checkFIRDataType()
        assert excinfo.match(pattern)
        sofa.close()

    def add_dims(ds, spec):
        # Create every (name, size) dimension in `spec` on dataset `ds`.
        for name, size in spec:
            ds.createDimension(name, size)

    base_dims = (('M', 3), ('N', 2), ('I', 1), ('R', 4))

    # Missing dimensions are reported one at a time: M, N, I, R
    fd, path = tempfile.mkstemp()
    os.close(fd)  # fix: release the raw descriptor; Dataset reopens by path
    ds = Dataset(path, 'w', format='NETCDF4')
    ds.close()
    expect_failure('Dimension not found: M')
    for (name, size), next_missing in zip(base_dims[:3], ('N', 'I', 'R')):
        ds = Dataset(path, 'a')
        ds.createDimension(name, size)
        ds.close()
        expect_failure('Dimension not found: ' + next_missing)

    # All dimensions present but Data.IR still missing
    ds = Dataset(path, 'a')
    ds.createDimension('R', 4)
    ds.close()
    expect_failure('Missing Data.IR Variable')

    # Data.IR present but with the wrong dimension ordering
    ds = Dataset(path, 'a')
    ds.createVariable('Data.IR', 'f8', ('N', 'M', 'R'))
    ds.close()
    expect_failure('Incorrect Data.IR dimensions:')
    os.remove(path)

    # Rebuild with a correct Data.IR; Data.SamplingRate missing
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.IR', 'f8', ('M', 'R', 'N'))
    ds.close()
    expect_failure('Missing Data.SamplingRate Variable')

    # Data.SamplingRate present but with wrong dimensions
    ds = Dataset(path, 'a')
    ds.createVariable('Data.SamplingRate', 'f8', ('N', 'M', 'R'))
    ds.close()
    expect_failure('Incorrect Data.SamplingRate dimensions:')
    os.remove(path)

    # Rebuild with correct IR and SamplingRate but no Units attribute
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.IR', 'f8', ('M', 'R', 'N'))
    ds.createVariable('Data.SamplingRate', 'f8', ('I',))
    ds.close()
    expect_failure('Missing Attribute Data.SamplingRate.Units')

    # Units present but not a frequency unit
    ds = Dataset(path, 'a')
    ds.variables['Data.SamplingRate'].Units = 'Kelvin'
    ds.close()
    expect_failure('Attribute Data.SamplingRate.Units is not a frequency unit')

    # Units fixed; Data.Delay still missing
    ds = Dataset(path, 'a')
    ds.variables['Data.SamplingRate'].Units = 'hertz'
    ds.close()
    expect_failure('Missing Data.Delay Variable')

    # Data.Delay present but with wrong dimensions
    ds = Dataset(path, 'a')
    ds.createVariable('Data.Delay', 'f8', ('R', 'N'))
    ds.close()
    expect_failure('Incorrect Data.Delay dimensions')
    os.remove(path)

    # A fully valid FIR layout must pass the check
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.IR', 'f8', ('M', 'R', 'N'))
    rate = ds.createVariable('Data.SamplingRate', 'f8', ('I',))
    rate.Units = 'hertz'
    ds.createVariable('Data.Delay', 'f8', ('I', 'R'))
    ds.close()
    sofa = SOFAFile(path, 'r')
    assert sofa.checkFIRDataType()
    sofa.close()  # fix: close the handle before removing the backing file
    os.remove(path)
def test_checkFIREDataType():
    """checkFIREDataType() must validate the FIRE data layout step by step.

    Each required dimension (including the emitter dimension E), variable
    and attribute is added one at a time; every incomplete state must raise
    SOFAError with a matching message, and a complete layout must pass.
    """

    def expect_failure(pattern):
        # checkFIREDataType() on the current file must raise SOFAError
        # whose message matches `pattern`.
        sofa = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as excinfo:
            sofa.checkFIREDataType()
        assert excinfo.match(pattern)
        sofa.close()

    def add_dims(ds, spec):
        # Create every (name, size) dimension in `spec` on dataset `ds`.
        for name, size in spec:
            ds.createDimension(name, size)

    base_dims = (('M', 3), ('N', 2), ('I', 1), ('R', 4), ('E', 5))

    # Missing dimensions are reported one at a time: M, N, I, R, E
    fd, path = tempfile.mkstemp()
    os.close(fd)  # fix: release the raw descriptor; Dataset reopens by path
    ds = Dataset(path, 'w', format='NETCDF4')
    ds.close()
    expect_failure('Dimension not found: M')
    for (name, size), next_missing in zip(base_dims[:4], ('N', 'I', 'R', 'E')):
        ds = Dataset(path, 'a')
        ds.createDimension(name, size)
        ds.close()
        expect_failure('Dimension not found: ' + next_missing)

    # All dimensions present but Data.IR still missing
    ds = Dataset(path, 'a')
    ds.createDimension('E', 5)
    ds.close()
    expect_failure('Missing Data.IR Variable')

    # Data.IR present but with the wrong dimension ordering
    ds = Dataset(path, 'a')
    ds.createVariable('Data.IR', 'f8', ('N', 'E', 'M', 'R'))
    ds.close()
    expect_failure('Incorrect Data.IR dimensions:')
    os.remove(path)

    # Rebuild with a correct Data.IR; Data.SamplingRate missing
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.IR', 'f8', ('M', 'R', 'E', 'N'))
    ds.close()
    expect_failure('Missing Data.SamplingRate Variable')

    # Data.SamplingRate present but with wrong dimensions
    ds = Dataset(path, 'a')
    ds.createVariable('Data.SamplingRate', 'f8', ('N', 'M', 'R'))
    ds.close()
    expect_failure('Incorrect Data.SamplingRate dimensions:')
    os.remove(path)

    # Rebuild with correct IR and SamplingRate but no Units attribute
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.IR', 'f8', ('M', 'R', 'E', 'N'))
    ds.createVariable('Data.SamplingRate', 'f8', ('I',))
    ds.close()
    expect_failure('Missing Attribute Data.SamplingRate.Units')

    # Units present but not a frequency unit
    ds = Dataset(path, 'a')
    ds.variables['Data.SamplingRate'].Units = 'Kelvin'
    ds.close()
    expect_failure('Attribute Data.SamplingRate.Units is not a frequency unit')

    # Units fixed; Data.Delay still missing
    ds = Dataset(path, 'a')
    ds.variables['Data.SamplingRate'].Units = 'hertz'
    ds.close()
    expect_failure('Missing Data.Delay Variable')

    # Data.Delay present but with wrong dimensions
    ds = Dataset(path, 'a')
    ds.createVariable('Data.Delay', 'f8', ('R', 'N'))
    ds.close()
    expect_failure('Incorrect Data.Delay dimensions')
    os.remove(path)

    # A fully valid FIRE layout must pass the check
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.IR', 'f8', ('M', 'R', 'E', 'N'))
    rate = ds.createVariable('Data.SamplingRate', 'f8', ('I',))
    rate.Units = 'hertz'
    ds.createVariable('Data.Delay', 'f8', ('I', 'R', 'E'))
    ds.close()
    sofa = SOFAFile(path, 'r')
    assert sofa.checkFIREDataType()
    sofa.close()  # fix: close the handle before removing the backing file
    os.remove(path)
def test_checkSOSDataType():
    """checkSOSDataType() must validate the SOS data layout step by step.

    Each required dimension, variable and attribute is added one at a time;
    after every incomplete state the check must raise SOFAError with a
    matching message, and a complete layout must pass.
    """

    def expect_failure(pattern):
        # checkSOSDataType() on the current file must raise SOFAError
        # whose message matches `pattern`.
        sofa = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as excinfo:
            sofa.checkSOSDataType()
        assert excinfo.match(pattern)
        sofa.close()

    def add_dims(ds, spec):
        # Create every (name, size) dimension in `spec` on dataset `ds`.
        for name, size in spec:
            ds.createDimension(name, size)

    base_dims = (('M', 3), ('N', 2), ('I', 1), ('R', 4))

    # Missing dimensions are reported one at a time: M, N, I, R
    fd, path = tempfile.mkstemp()
    os.close(fd)  # fix: release the raw descriptor; Dataset reopens by path
    ds = Dataset(path, 'w', format='NETCDF4')
    ds.close()
    expect_failure('Dimension not found: M')
    for (name, size), next_missing in zip(base_dims[:3], ('N', 'I', 'R')):
        ds = Dataset(path, 'a')
        ds.createDimension(name, size)
        ds.close()
        expect_failure('Dimension not found: ' + next_missing)

    # All dimensions present but Data.IR still missing
    ds = Dataset(path, 'a')
    ds.createDimension('R', 4)
    ds.close()
    expect_failure('Missing Data.IR Variable')

    # Data.IR present but with the wrong dimension ordering
    ds = Dataset(path, 'a')
    ds.createVariable('Data.IR', 'f8', ('N', 'M', 'R'))
    ds.close()
    expect_failure('Incorrect Data.IR dimensions:')
    os.remove(path)

    # Rebuild with a correct Data.IR; Data.SamplingRate missing
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.IR', 'f8', ('M', 'R', 'N'))
    ds.close()
    expect_failure('Missing Data.SamplingRate Variable')

    # Data.SamplingRate present but with wrong dimensions
    ds = Dataset(path, 'a')
    ds.createVariable('Data.SamplingRate', 'f8', ('N', 'M', 'R'))
    ds.close()
    expect_failure('Incorrect Data.SamplingRate dimensions:')
    os.remove(path)

    # Rebuild with correct IR and SamplingRate but no Units attribute
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.IR', 'f8', ('M', 'R', 'N'))
    ds.createVariable('Data.SamplingRate', 'f8', ('I',))
    ds.close()
    expect_failure('Missing Attribute Data.SamplingRate.Units')

    # Units present but not a frequency unit
    ds = Dataset(path, 'a')
    ds.variables['Data.SamplingRate'].Units = 'Kelvin'
    ds.close()
    expect_failure('Attribute Data.SamplingRate.Units is not a frequency unit')

    # Units fixed; Data.Delay still missing
    ds = Dataset(path, 'a')
    ds.variables['Data.SamplingRate'].Units = 'hertz'
    ds.close()
    expect_failure('Missing Data.Delay Variable')

    # Data.Delay present but with wrong dimensions
    ds = Dataset(path, 'a')
    ds.createVariable('Data.Delay', 'f8', ('R', 'N'))
    ds.close()
    expect_failure('Incorrect Data.Delay dimensions')
    os.remove(path)

    # A fully valid SOS layout must pass the check
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.IR', 'f8', ('M', 'R', 'N'))
    rate = ds.createVariable('Data.SamplingRate', 'f8', ('I',))
    rate.Units = 'hertz'
    ds.createVariable('Data.Delay', 'f8', ('I', 'R'))
    ds.close()
    sofa = SOFAFile(path, 'r')
    assert sofa.checkSOSDataType()
    sofa.close()  # fix: close the handle before removing the backing file
    os.remove(path)
def test_checkTFDataType():
    """checkTFDataType() must validate the TF data layout step by step.

    Each required dimension, the Data.Real/Data.Imag variables, the N
    frequency variable and its Units attribute are added one at a time;
    every incomplete state must raise SOFAError with a matching message,
    and a complete layout must pass.
    """

    def expect_failure(pattern):
        # checkTFDataType() on the current file must raise SOFAError
        # whose message matches `pattern`.
        sofa = SOFAFile(path, 'r')
        with pytest.raises(SOFAError) as excinfo:
            sofa.checkTFDataType()
        assert excinfo.match(pattern)
        sofa.close()

    def add_dims(ds, spec):
        # Create every (name, size) dimension in `spec` on dataset `ds`.
        for name, size in spec:
            ds.createDimension(name, size)

    base_dims = (('M', 3), ('N', 2), ('I', 1), ('R', 4))

    # Missing dimensions are reported one at a time: M, N, I, R
    fd, path = tempfile.mkstemp()
    os.close(fd)  # fix: release the raw descriptor; Dataset reopens by path
    ds = Dataset(path, 'w', format='NETCDF4')
    ds.close()
    expect_failure('Dimension not found: M')
    for (name, size), next_missing in zip(base_dims[:3], ('N', 'I', 'R')):
        ds = Dataset(path, 'a')
        ds.createDimension(name, size)
        ds.close()
        expect_failure('Dimension not found: ' + next_missing)

    # All dimensions present but Data.Real still missing
    ds = Dataset(path, 'a')
    ds.createDimension('R', 4)
    ds.close()
    expect_failure('Missing Data.Real Variable')

    # Data.Real present but with the wrong dimension ordering
    ds = Dataset(path, 'a')
    ds.createVariable('Data.Real', 'f8', ('N', 'M', 'R'))
    ds.close()
    expect_failure('Incorrect Data.Real dimensions:')
    os.remove(path)

    # Rebuild with a correct Data.Real; Data.Imag missing
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.Real', 'f8', ('M', 'R', 'N'))
    ds.close()
    expect_failure('Missing Data.Imag Variable')

    # Data.Imag present but with wrong dimensions
    ds = Dataset(path, 'a')
    ds.createVariable('Data.Imag', 'f8', ('N', 'M', 'R'))
    ds.close()
    expect_failure('Incorrect Data.Imag dimensions:')
    os.remove(path)

    # Rebuild with Real and Imag; the N frequency variable missing
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.Real', 'f8', ('M', 'R', 'N'))
    ds.createVariable('Data.Imag', 'f8', ('M', 'R', 'N'))
    ds.close()
    expect_failure('Missing N Variable')

    # N variable present but bound to the wrong dimension
    ds = Dataset(path, 'a')
    ds.createVariable('N', 'f8', ('R',))
    ds.close()
    expect_failure('Incorrect N dimensions')
    os.remove(path)

    # Rebuild with a correct N variable but no Units attribute
    ds = Dataset(path, 'w', format='NETCDF4')
    add_dims(ds, base_dims)
    ds.createVariable('Data.Real', 'f8', ('M', 'R', 'N'))
    ds.createVariable('Data.Imag', 'f8', ('M', 'R', 'N'))
    ds.createVariable('N', 'f8', ('N',))
    ds.close()
    expect_failure('Missing Attribute N.Units')

    # Units present but not a frequency unit
    ds = Dataset(path, 'a')
    ds.variables['N'].Units = 'Kelvin'
    ds.close()
    expect_failure('Attribute N.Units is not a frequency unit')

    # Units fixed: the complete TF layout must pass the check
    ds = Dataset(path, 'a')
    ds.variables['N'].Units = 'hertz'
    ds.close()
    sofa = SOFAFile(path, 'r')
    assert sofa.checkTFDataType()
    sofa.close()  # fix: close the handle before removing the backing file
    os.remove(path)
| 29.455822
| 103
| 0.656766
| 7,600
| 71,342
| 6.146974
| 0.063421
| 0.060535
| 0.07783
| 0.042297
| 0.808314
| 0.778132
| 0.730398
| 0.69037
| 0.664833
| 0.61864
| 0
| 0.011549
| 0.202588
| 71,342
| 2,422
| 104
| 29.455822
| 0.809647
| 0.091447
| 0
| 0.763529
| 0
| 0
| 0.125196
| 0.00873
| 0
| 0
| 0
| 0
| 0.070588
| 1
| 0.059412
| false
| 0
| 0.005294
| 0.000588
| 0.065882
| 0.003529
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
702098b965481a2e794c1378b66a32507ca3641a
| 47
|
py
|
Python
|
libs/src/evalib/datasets/__init__.py
|
gantir/eva4-2
|
e95d7f614d21931150d4c0b6b5437c90a742d408
|
[
"Apache-2.0"
] | null | null | null |
libs/src/evalib/datasets/__init__.py
|
gantir/eva4-2
|
e95d7f614d21931150d4c0b6b5437c90a742d408
|
[
"Apache-2.0"
] | null | null | null |
libs/src/evalib/datasets/__init__.py
|
gantir/eva4-2
|
e95d7f614d21931150d4c0b6b5437c90a742d408
|
[
"Apache-2.0"
] | 1
|
2021-04-10T05:03:53.000Z
|
2021-04-10T05:03:53.000Z
|
from .tinyimagenet import TinyImageNet # noqa
| 23.5
| 46
| 0.808511
| 5
| 47
| 7.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 47
| 1
| 47
| 47
| 0.95
| 0.085106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
70509e618b2b921ddbf29e7286a66aba1eab28de
| 71
|
py
|
Python
|
tracardi/process_engine/action/v1/operations/read_from_memory/model/config.py
|
DawidekZagajnik/tracardi
|
979015b7b14cb87fb639efb1eee6537932319b61
|
[
"MIT"
] | 153
|
2021-11-02T00:35:41.000Z
|
2022-03-25T16:37:44.000Z
|
tracardi/process_engine/action/v1/operations/read_from_memory/model/config.py
|
DawidekZagajnik/tracardi
|
979015b7b14cb87fb639efb1eee6537932319b61
|
[
"MIT"
] | 243
|
2021-10-17T17:00:22.000Z
|
2022-03-28T10:13:34.000Z
|
tracardi/process_engine/action/v1/operations/read_from_memory/model/config.py
|
DawidekZagajnik/tracardi
|
979015b7b14cb87fb639efb1eee6537932319b61
|
[
"MIT"
] | 14
|
2021-10-17T11:39:04.000Z
|
2022-03-14T14:44:02.000Z
|
from pydantic import BaseModel
class Config(BaseModel):
key: str
| 11.833333
| 30
| 0.746479
| 9
| 71
| 5.888889
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.197183
| 71
| 5
| 31
| 14.2
| 0.929825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
705279396d944ca7ce3fec687a1f87ad3b34b61c
| 85
|
py
|
Python
|
final_project/machinetranslation/__init__.py
|
Pushapwant/xzceb-flask_eng_fr
|
aa0c945405ff5fc6ae74687acfb80dedb8b5f414
|
[
"Apache-2.0"
] | null | null | null |
final_project/machinetranslation/__init__.py
|
Pushapwant/xzceb-flask_eng_fr
|
aa0c945405ff5fc6ae74687acfb80dedb8b5f414
|
[
"Apache-2.0"
] | null | null | null |
final_project/machinetranslation/__init__.py
|
Pushapwant/xzceb-flask_eng_fr
|
aa0c945405ff5fc6ae74687acfb80dedb8b5f414
|
[
"Apache-2.0"
] | null | null | null |
# from translatory import frenchToEnglish, englishToFrench
from . import translatory
| 28.333333
| 58
| 0.847059
| 8
| 85
| 9
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 85
| 2
| 59
| 42.5
| 0.96
| 0.658824
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
705c6d28eb91e395a67306d36b1fe038ab96d812
| 330
|
py
|
Python
|
datasets/__init__.py
|
ming71/DAL
|
48cd29fdbf5eeea1b5b642bd1f04bbf1863b31e3
|
[
"Apache-2.0"
] | 206
|
2020-09-12T06:17:00.000Z
|
2022-03-28T08:05:51.000Z
|
datasets/__init__.py
|
JOOCHANN/DAL
|
0f379de70ba01c6c9162f4e980a8bd2491976e9c
|
[
"Apache-2.0"
] | 47
|
2020-10-21T06:14:18.000Z
|
2022-03-16T01:54:28.000Z
|
datasets/__init__.py
|
JOOCHANN/DAL
|
0f379de70ba01c6c9162f4e980a8bd2491976e9c
|
[
"Apache-2.0"
] | 38
|
2020-10-22T10:39:51.000Z
|
2022-03-17T12:36:46.000Z
|
from .collater import Collater
from .icdar_dataset import IC15Dataset, IC13Dataset
from .voc_dataset import VOCDataset
from .hrsc_dataset import HRSCDataset
from .dota_dataset import DOTADataset
from .ucas_aod_dataset import UCAS_AODDataset
from .nwpu_vhr_dataset import NWPUDataset
from .gaofen_dataset import GaoFenShipDataset
| 33
| 51
| 0.869697
| 43
| 330
| 6.44186
| 0.488372
| 0.32852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013514
| 0.10303
| 330
| 9
| 52
| 36.666667
| 0.922297
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5622d746fa6305a57eb0f7bd5d1c484b2d6e8759
| 227
|
py
|
Python
|
malib/spaces/__init__.py
|
alvaro-serra/malib
|
fe2b0736974c2a3ed9e41121b6cf475a3ee0b5a0
|
[
"MIT"
] | 23
|
2020-07-05T11:13:00.000Z
|
2022-01-28T00:24:41.000Z
|
malib/spaces/__init__.py
|
Taospirit/malib
|
fe2b0736974c2a3ed9e41121b6cf475a3ee0b5a0
|
[
"MIT"
] | 2
|
2020-09-07T19:09:40.000Z
|
2021-06-02T02:21:51.000Z
|
malib/spaces/__init__.py
|
Taospirit/malib
|
fe2b0736974c2a3ed9e41121b6cf475a3ee0b5a0
|
[
"MIT"
] | 8
|
2020-07-06T07:24:37.000Z
|
2021-09-27T20:28:25.000Z
|
from gym.spaces import Discrete, Box, Tuple, Dict
from malib.spaces.space import MASpace
from malib.spaces.env_spec import EnvSpec, MAEnvSpec
# __all__ = ["Box", "Discrete", "MASpace", "Tuple", "Dict", "EnvSpec", "MAEnvSpec"]
| 37.833333
| 83
| 0.735683
| 30
| 227
| 5.4
| 0.533333
| 0.111111
| 0.185185
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118943
| 227
| 5
| 84
| 45.4
| 0.81
| 0.356828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
564caf085fc98a855a7b080070efb5ad25a564a3
| 120
|
py
|
Python
|
Introduction to Python/Introduction to Python Smallpiece 2018/Strings/Concatenation/concatenation.py
|
phamola/firstproject1
|
1e2aaafeb7abf9c82e4c823f197fc1fbefce6416
|
[
"Apache-2.0"
] | null | null | null |
Introduction to Python/Introduction to Python Smallpiece 2018/Strings/Concatenation/concatenation.py
|
phamola/firstproject1
|
1e2aaafeb7abf9c82e4c823f197fc1fbefce6416
|
[
"Apache-2.0"
] | null | null | null |
Introduction to Python/Introduction to Python Smallpiece 2018/Strings/Concatenation/concatenation.py
|
phamola/firstproject1
|
1e2aaafeb7abf9c82e4c823f197fc1fbefce6416
|
[
"Apache-2.0"
] | null | null | null |
hello = "Hello"
world = 'World'
hello_world = type here
print(hello_world) # Note: you should print "Hello World"
| 20
| 62
| 0.691667
| 17
| 120
| 4.764706
| 0.470588
| 0.493827
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 120
| 5
| 63
| 24
| 0.84375
| 0.3
| 0
| 0
| 0
| 0
| 0.121951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.25
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
565e348e8ff67a4b4837ae86082be8404875f259
| 61
|
py
|
Python
|
Python Set 2/VSCodeTest1.py
|
bertt1t11/Python
|
6a0f4b43b1c5598b311db3fb07b42313001b469a
|
[
"Apache-2.0"
] | null | null | null |
Python Set 2/VSCodeTest1.py
|
bertt1t11/Python
|
6a0f4b43b1c5598b311db3fb07b42313001b469a
|
[
"Apache-2.0"
] | null | null | null |
Python Set 2/VSCodeTest1.py
|
bertt1t11/Python
|
6a0f4b43b1c5598b311db3fb07b42313001b469a
|
[
"Apache-2.0"
] | null | null | null |
import tkinter
from tkinter import *
Widget.bell(2,1)
| 10.166667
| 22
| 0.688525
| 9
| 61
| 4.666667
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042553
| 0.229508
| 61
| 6
| 23
| 10.166667
| 0.851064
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
56911f415785050484075f24047628d8a642c328
| 211
|
py
|
Python
|
api/model/credentials.py
|
Th3re/steve-auth-api
|
35037ca99a9cba2155214752a1a13ce9369468ec
|
[
"MIT"
] | 1
|
2020-04-17T20:13:16.000Z
|
2020-04-17T20:13:16.000Z
|
api/model/credentials.py
|
Th3re/steve-calendar-api
|
65678f6f39204aaa09f24ed85a13b97b9a55d9b6
|
[
"MIT"
] | 1
|
2020-03-22T16:27:16.000Z
|
2020-03-22T16:27:16.000Z
|
api/model/credentials.py
|
Th3re/steve-auth-api
|
35037ca99a9cba2155214752a1a13ce9369468ec
|
[
"MIT"
] | null | null | null |
from api.libs.representation.pretty import PrettyPrint
class Credentials(PrettyPrint):
    """Credential record pairing a user id with its OAuth refresh token.

    PrettyPrint (project base class) supplies the readable representation.
    """

    def __init__(self, refresh_token, user_id):
        # Plain attribute storage; assignment order is irrelevant.
        self.refresh_token = refresh_token
        self.user_id = user_id
| 26.375
| 54
| 0.748815
| 27
| 211
| 5.481481
| 0.592593
| 0.243243
| 0.216216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.180095
| 211
| 7
| 55
| 30.142857
| 0.855491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
3b06cf3d5339cc2345dab5030891086f09a05597
| 28,060
|
py
|
Python
|
plantclef/plantclef_meta.py
|
kelvincr/ViT-pytorch
|
fca6694ba595dfb118b0f7402abc804ceea387d5
|
[
"MIT"
] | null | null | null |
plantclef/plantclef_meta.py
|
kelvincr/ViT-pytorch
|
fca6694ba595dfb118b0f7402abc804ceea387d5
|
[
"MIT"
] | null | null | null |
plantclef/plantclef_meta.py
|
kelvincr/ViT-pytorch
|
fca6694ba595dfb118b0f7402abc804ceea387d5
|
[
"MIT"
] | null | null | null |
family = {'159714': 0, '159719': 0, '191373': 1, '202271': 2, '160570': 0, '161033': 0, '65910': 3, '197755': 4, '197761': 4, '279730': 5, '279732': 5, '191642': 1, '197791': 4, '41751': 6, '41752': 6, '41764': 6, '279781': 5, '279787': 5, '279833': 5, '279848': 5, '279849': 5, '279853': 5, '279869': 5, '279877': 5, '279884': 5, '279895': 5, '279923': 5, '279924': 5, '50269': 7, '50420': 7, '148220': 8, '148291': 8, '214245': 9, '156425': 10, '156430': 10, '156435': 10, '162067': 0, '19165': 11, '309517': 12, '330070': 13, '7660': 14, '7767': 14, '290677': 15, '249436': 16, '148446': 8, '19341': 11, '322921': 17, '41801': 6, '41810': 6, '12254': 18, '13239': 19, '279988': 5, '156507': 10, '156516': 10, '156523': 10, '156526': 10, '156529': 10, '156530': 10, '156543': 10, '13276': 19, '13325': 19, '13330': 19, '13336': 19, '13370': 19, '13376': 19, '13390': 19, '13401': 19, '13420': 19, '26327': 20, '26393': 20, '26635': 20, '191839': 1, '197861': 4, '50551': 7, '33693': 21, '19489': 11, '19758': 11, '37194': 22, '37371': 22, '37382': 22, '37525': 22, '37545': 22, '30924': 23, '30925': 23, '30929': 23, '30937': 23, '373': 24, '30986': 23, '244705': 25, '72999': 26, '31014': 23, '31016': 23, '31023': 23, '31032': 23, '31042': 23, '31061': 23, '31066': 23, '31071': 23, '31075': 23, '31082': 23, '306822': 27, '620': 24, '166135': 0, '2041': 24, '197919': 4, '2071': 24, '2401': 24, '3454': 24, '50622': 7, '42649': 28, '42799': 29, '3491': 24, '166405': 0, '216314': 30, '203347': 31, '4291': 32, '6834': 33, '190183': 34, '53804': 35, '190287': 34, '8656': 14, '12518': 18, '13820': 19, '14401': 19, '27221': 20, '167094': 0, '15014': 19, '67084': 36, '205170': 37, '205179': 37, '205564': 37, '273462': 38, '60320': 39, '291840': 15, '57485': 40, '201652': 41, '15091': 19, '17032': 42, '61859': 43, '61862': 43, '19887': 11, '61866': 43, '21329': 11, '306892': 27, '22256': 11, '306961': 27, '306966': 27, '167503': 0, '22319': 11, '285398': 44, '192285': 1, '22348': 11, 
'323153': 45, '201678': 41, '192311': 1, '23107': 11, '29586': 46, '280085': 5, '38515': 47, '167721': 0, '39604': 48, '41913': 6, '41921': 6, '212342': 49, '64672': 50, '41941': 6, '292093': 15, '292096': 15, '41992': 6, '42003': 6, '65928': 3, '311149': 51, '311160': 51, '311165': 51, '311202': 51, '311210': 51, '311223': 51, '42046': 6, '60188': 52, '145020': 53, '42087': 6, '42122': 6, '220108': 30, '42258': 6, '42264': 6, '42278': 6, '42289': 6, '42586': 6, '42594': 6, '54608': 54, '54615': 54, '54777': 55, '67371': 36, '67374': 36, '67386': 36, '67457': 36, '67482': 36, '67517': 36, '67539': 36, '42658': 28, '55477': 56, '56868': 56, '69125': 57, '69226': 57, '57575': 40, '128664': 58, '58398': 59, '61792': 60, '68769': 61, '43548': 62, '106387': 63, '106398': 63, '106413': 63, '106461': 63, '106466': 63, '65935': 3, '65939': 3, '65949': 3, '65955': 3, '65960': 3, '61864': 43, '20515': 11, '159434': 64, '159437': 64, '159445': 64, '323166': 45, '323189': 45, '65979': 3, '60646': 65, '13597': 19, '66121': 3, '66173': 3, '129339': 58, '66251': 3, '129645': 58, '169457': 0, '169479': 0, '169495': 0, '66253': 3, '309819': 12, '309845': 12, '309849': 12, '66257': 3, '330590': 13, '66284': 3, '66310': 3, '109922': 66, '109976': 66, '110027': 66, '66853': 67, '118800': 68, '13656': 19, '169611': 0, '60659': 65, '67990': 36, '68607': 61, '196781': 69, '196785': 69, '68717': 61, '61750': 70, '32035': 23, '32037': 23, '73884': 26, '170879': 0, '69344': 57, '170934': 0, '141332': 71, '141336': 71, '144394': 72, '27455': 20, '27457': 20, '76188': 26, '76694': 26, '77596': 26, '120497': 73, '120499': 73, '120508': 73, '120562': 73, '79435': 26, '80596': 26, '80672': 26, '120980': 73, '80735': 26, '80894': 26, '38247': 74, '38260': 74, '38272': 74, '171131': 0, '319098': 75, '87237': 26, '27492': 20, '13796': 19, '13811': 19, '90626': 26, '13870': 19, '13879': 19, '93740': 26, '293248': 15, '171333': 0, '311252': 51, '311258': 51, '95163': 26, '275029': 76, '98464': 26, 
'145231': 53, '211755': 77, '211759': 77, '99040': 26, '99232': 26, '157810': 10, '157841': 10, '157842': 10, '102689': 26, '171479': 0, '171485': 0, '171495': 0, '13907': 19, '226017': 30, '104587': 78, '105927': 79, '105951': 79, '105965': 79, '106023': 79, '106690': 80, '171727': 0, '127020': 81, '127097': 81, '127151': 81, '127242': 81, '159483': 64, '159486': 64, '159490': 64, '159505': 64, '159516': 64, '159518': 64, '159535': 64, '159539': 64, '159541': 64, '330705': 13, '206694': 37, '206710': 37, '206728': 37, '206729': 37, '206767': 37, '206874': 37, '206927': 37, '207009': 37, '207042': 37, '207161': 37, '207167': 37, '207375': 37, '207441': 37, '207452': 37, '207470': 37, '207575': 37, '207589': 37, '108335': 82, '108581': 82, '108743': 82, '117166': 83, '120569': 73, '120745': 73, '32248': 23, '110432': 84, '66033': 3, '120786': 73, '203918': 31, '204178': 31, '204277': 31, '69391': 57, '121037': 73, '121088': 73, '122564': 85, '14044': 19, '305773': 86, '67619': 36, '67784': 36, '67786': 36, '21388': 11, '21391': 11, '32258': 23, '32272': 23, '32293': 23, '32316': 23, '125412': 85, '132325': 58, '129442': 58, '141508': 87, '130017': 58, '141856': 88, '201893': 41, '14198': 19, '14287': 19, '14294': 19, '14314': 19, '14353': 19, '14359': 19, '130657': 58, '14402': 19, '193515': 1, '131079': 58, '159581': 64, '159597': 64, '51237': 7, '42180': 6, '131128': 58, '212384': 49, '212395': 49, '142550': 89, '142564': 89, '142584': 89, '142646': 89, '142653': 89, '142661': 89, '131736': 58, '132107': 58, '198802': 4, '132113': 58, '142736': 90, '27569': 20, '27581': 20, '132476': 58, '132501': 58, '21718': 11, '66056': 3, '66070': 3, '66083': 3, '66090': 3, '66108': 3, '66115': 3, '133181': 58, '66126': 3, '66143': 3, '133414': 58, '133595': 58, '194115': 1, '188837': 91, '132431': 58, '135479': 92, '173155': 0, '11438': 93, '143466': 94, '143496': 94, '32433': 23, '136761': 92, '137051': 92, '137122': 92, '173907': 0, '173912': 0, '173914': 0, '173972': 0, 
'174017': 0, '174029': 0, '174063': 0, '174066': 0, '174076': 0, '174087': 0, '174137': 0, '174138': 0, '174158': 0, '174163': 0, '174165': 0, '174170': 0, '174188': 0, '174207': 0, '174213': 0, '174233': 0, '174252': 0, '229370': 30, '204775': 95, '204777': 95, '295591': 15, '42220': 6, '137133': 92, '138662': 96, '140367': 96, '141068': 96, '142779': 90, '144643': 97, '148977': 8, '149264': 8, '157870': 10, '296317': 15, '148077': 98, '148080': 98, '148086': 98, '21973': 11, '307230': 27, '189532': 99, '149682': 8, '149919': 8, '150135': 8, '150179': 8, '150888': 8, '159625': 64, '159627': 64, '159629': 64, '159630': 64, '159637': 64, '159640': 64, '159647': 64, '151469': 8, '229955': 30, '66171': 3, '151593': 8, '66197': 3, '152143': 8, '166822': 0, '166869': 0, '66277': 3, '167549': 0, '66286': 3, '66297': 3, '66303': 3, '66306': 3, '167811': 0, '157894': 10, '157911': 10, '157917': 10, '4283': 32, '167813': 0, '186696': 100, '186704': 100, '186714': 100, '186746': 100, '186757': 100, '167921': 0, '168290': 0, '169293': 0, '169943': 0, '171057': 0, '171067': 0, '171452': 0, '194252': 1, '176381': 0, '177501': 0, '189086': 101, '189087': 101, '132735': 58, '177730': 0, '22118': 11, '177775': 0, '176453': 0, '176461': 0, '57572': 40, '178094': 0, '22180': 11, '180255': 0, '181966': 0, '181973': 0, '182092': 0, '182140': 0, '182246': 0, '311462': 51, '311513': 51, '182247': 0, '185302': 102, '133287': 58, '196487': 103, '185321': 102, '185430': 102, '186582': 104, '187279': 105, '232112': 30, '310335': 12, '310355': 12, '187306': 105, '32768': 23, '187515': 105, '65211': 50, '187824': 106, '189172': 99, '202825': 107, '199580': 4, '199587': 4, '189549': 99, '199627': 4, '189669': 99, '192142': 1, '199742': 4, '193141': 1, '193600': 1, '199838': 4, '199938': 4, '193927': 1, '200031': 4, '194035': 1, '200087': 4, '194701': 1, '200202': 4, '200204': 4, '200215': 4, '200228': 4, '195246': 1, '200319': 4, '200320': 4, '200338': 4, '200402': 4, '274102': 38, '274108': 
38, '274114': 38, '274118': 38, '311546': 51, '311557': 51, '311563': 51, '311565': 51, '195454': 1, '195768': 1, '196866': 108, '196912': 108, '196966': 108, '212412': 49, '27760': 20, '27803': 20, '198260': 4, '198307': 4, '200638': 4, '200640': 4, '200660': 4, '200676': 4, '200679': 4, '200692': 4, '200704': 4, '198315': 4, '198341': 4, '198363': 4, '208641': 37, '208673': 37, '208724': 37, '208788': 37, '208865': 37, '208949': 37, '208956': 37, '208971': 37, '208994': 37, '209006': 37, '209078': 37, '158323': 10, '158341': 10, '211519': 109, '211059': 110, '211075': 110, '198419': 4, '274257': 38, '198471': 4, '198555': 4, '198557': 4, '158534': 10, '158542': 10, '158592': 10, '158596': 10, '158612': 10, '158629': 10, '158653': 10, '158668': 10, '158728': 10, '158736': 10, '158784': 10, '158793': 10, '158816': 10, '158833': 10, '158866': 10, '158884': 10, '158894': 10, '198819': 4, '119986': 68, '198971': 4, '29686': 46, '178589': 0, '178591': 0, '178637': 0, '178654': 0, '199099': 4, '211958': 77, '212012': 77, '199111': 4, '212120': 77, '14741': 19, '14766': 19, '194716': 1, '199158': 4, '199197': 4, '235182': 30, '279396': 111, '23429': 11, '179451': 0, '66426': 3, '66436': 3, '179495': 0, '179511': 0, '179512': 0, '199220': 4, '326051': 112, '179616': 0, '179643': 0, '199591': 4, '199672': 4, '199728': 4, '199744': 4, '199810': 4, '200006': 4, '27989': 20, '28014': 20, '28037': 20, '28066': 20, '28131': 20, '28143': 20, '28154': 20, '28203': 20, '28225': 20, '28235': 20, '200075': 4, '200119': 4, '200249': 4, '200712': 4, '200877': 4, '315771': 113, '200915': 4, '201073': 4, '201318': 4, '201384': 4, '202049': 41, '212087': 77, '212378': 49, '212920': 114, '280698': 5, '68010': 36, '213809': 115, '236883': 30, '237053': 30, '213831': 115, '180253': 0, '213841': 115, '249138': 116, '120031': 68, '120045': 68, '274693': 38, '213850': 115, '299768': 15, '213864': 115, '324330': 45, '324340': 45, '324344': 45, '324353': 45, '324356': 45, '311974': 51, '312000': 
51, '312005': 51, '312024': 51, '312054': 51, '312118': 51, '312121': 51, '312135': 51, '312150': 51, '312158': 51, '312172': 51, '312184': 51, '237937': 30, '213866': 115, '54503': 54, '54525': 54, '54543': 54, '54566': 54, '54580': 54, '54590': 54, '54594': 54, '54595': 54, '54597': 54, '310653': 12, '204432': 31, '180446': 0, '14939': 19, '209327': 37, '209328': 37, '248199': 117, '116853': 118, '280745': 5, '280887': 5, '180631': 0, '248283': 117, '248325': 117, '195192': 1, '195201': 1, '248357': 117, '212199': 77, '212213': 77, '301929': 15, '283776': 119, '331212': 13, '331234': 13, '284430': 120, '248374': 117, '159248': 10, '248392': 117, '248478': 121, '326060': 112, '326108': 112, '326115': 112, '326175': 112, '250534': 16, '251291': 16, '251585': 122, '279552': 111, '251624': 122, '252665': 123, '252779': 123, '307446': 27, '252811': 123, '253046': 123, '306775': 124, '133604': 58, '308130': 125, '133607': 58, '133617': 58, '133630': 58, '312237': 51, '30431': 46, '313517': 126, '253695': 123, '253836': 123, '254000': 123, '254111': 123, '254180': 123, '256363': 127, '257336': 128, '315398': 129, '315564': 129, '315584': 129, '315597': 129, '315603': 129, '315651': 129, '270090': 130, '270961': 131, '271764': 131, '271781': 131, '275255': 132, '276624': 133, '285297': 134, '291847': 15, '159284': 10, '292111': 15, '315831': 113, '292732': 15, '293559': 15, '315862': 135, '315864': 135, '315866': 135, '315877': 135, '315899': 135, '121825': 136, '121836': 136, '121841': 136, '121854': 136, '316145': 137, '33309': 23, '294700': 15, '297360': 15, '298602': 15, '317986': 138, '298670': 15, '301640': 15, '302476': 15, '302787': 15, '302866': 15, '303214': 15, '305648': 86, '305857': 86, '306879': 27, '306921': 27, '308678': 139, '309983': 12, '310373': 12, '310554': 12, '182826': 0, '12910': 18, '310768': 12, '310829': 12, '195628': 1, '195663': 1, '195688': 1, '310914': 12, '187390': 105, '313568': 140, '182907': 0, '319391': 141, '183060': 0, '183061': 0, 
'183069': 0, '183113': 0, '183144': 0, '183176': 0, '183194': 0, '316306': 138, '33348': 23, '68035': 36, '316519': 138, '126895': 142, '28904': 20, '42532': 6, '24557': 11, '317044': 138, '183297': 0, '183298': 0, '183304': 0, '183305': 0, '183318': 0, '317444': 138, '310933': 12, '317531': 138, '12922': 18, '12924': 18, '117064': 143, '183350': 0, '317643': 138, '320058': 144, '320118': 144, '320168': 144, '68821': 61, '248950': 145, '317709': 138, '317765': 138, '321160': 146, '321261': 146, '321303': 146, '321529': 146, '195752': 1, '317809': 138, '195770': 1, '12932': 18, '12937': 18, '318110': 138, '52657': 7, '52973': 7, '318253': 138, '127007': 142, '318313': 138, '311021': 12, '68084': 36, '68097': 36, '68104': 36, '60309': 147, '202107': 41, '202118': 41, '202134': 41, '143674': 94, '143676': 94, '143697': 94, '143706': 94, '143710': 94, '143731': 94, '143749': 94, '143755': 94, '143758': 94, '143780': 94, '143783': 94, '242361': 30, '15089': 19, '318639': 138, '320188': 144, '204499': 31, '318675': 138, '318743': 138, '318807': 138, '318869': 138, '324133': 45, '15119': 19, '15120': 19, '15135': 19, '15140': 19, '15147': 19, '325055': 148, '29013': 20, '325220': 148, '325552': 148, '325562': 148, '184434': 0, '144341': 149, '144357': 149, '144377': 149, '144384': 149, '156069': 8, '281000': 5, '281017': 5, '327875': 150, '201444': 4, '184849': 0, '311053': 12, '325605': 148, '325648': 148, '15318': 19, '15330': 19, '15336': 19, '15355': 19, '15382': 19, '15409': 19, '306466': 86, '306577': 86, '325681': 148, '185128': 0, '185133': 0}
genus = {'159714': 0, '159719': 0, '191373': 1, '202271': 2, '160570': 3, '161033': 3, '65910': 4, '197755': 5, '197761': 5, '279730': 6, '279732': 6, '191642': 7, '197791': 8, '41751': 9, '41752': 9, '41764': 9, '279781': 10, '279787': 10, '279833': 10, '279848': 10, '279849': 10, '279853': 10, '279869': 10, '279877': 10, '279884': 10, '279895': 10, '279923': 10, '279924': 10, '50269': 11, '50420': 11, '148220': 12, '148291': 12, '214245': 13, '156425': 14, '156430': 14, '156435': 14, '162067': 15, '19165': 16, '309517': 17, '330070': 18, '7660': 19, '7767': 19, '290677': 20, '249436': 21, '148446': 22, '19341': 23, '322921': 24, '41801': 25, '41810': 25, '12254': 26, '13239': 27, '279988': 28, '156507': 29, '156516': 29, '156523': 29, '156526': 29, '156529': 29, '156530': 29, '156543': 29, '13276': 30, '13325': 30, '13330': 30, '13336': 30, '13370': 30, '13376': 30, '13390': 30, '13401': 30, '13420': 30, '26327': 31, '26393': 31, '26635': 31, '191839': 32, '197861': 33, '50551': 34, '33693': 35, '19489': 36, '19758': 37, '37194': 38, '37371': 38, '37382': 38, '37525': 38, '37545': 38, '30924': 39, '30925': 39, '30929': 39, '30937': 39, '373': 40, '30986': 41, '244705': 42, '72999': 43, '31014': 44, '31016': 44, '31023': 44, '31032': 44, '31042': 44, '31061': 44, '31066': 44, '31071': 44, '31075': 44, '31082': 44, '306822': 45, '620': 46, '166135': 47, '2041': 48, '197919': 49, '2071': 48, '2401': 48, '3454': 50, '50622': 51, '42649': 52, '42799': 53, '3491': 50, '166405': 54, '216314': 55, '203347': 56, '4291': 57, '6834': 58, '190183': 59, '53804': 60, '190287': 61, '8656': 62, '12518': 63, '13820': 64, '14401': 65, '27221': 66, '167094': 67, '15014': 68, '67084': 69, '205170': 70, '205179': 70, '205564': 71, '273462': 72, '60320': 73, '291840': 74, '57485': 75, '201652': 76, '15091': 77, '17032': 78, '61859': 79, '61862': 79, '19887': 80, '61866': 79, '21329': 81, '306892': 82, '22256': 83, '306961': 82, '306966': 82, '167503': 84, '22319': 83, '285398': 85, 
'192285': 86, '22348': 83, '323153': 87, '201678': 88, '192311': 89, '23107': 90, '29586': 91, '280085': 92, '38515': 93, '167721': 94, '39604': 95, '41913': 96, '41921': 96, '212342': 97, '64672': 98, '41941': 99, '292093': 100, '292096': 100, '41992': 101, '42003': 102, '65928': 103, '311149': 104, '311160': 104, '311165': 104, '311202': 104, '311210': 104, '311223': 104, '42046': 105, '60188': 106, '145020': 107, '42087': 108, '42122': 108, '220108': 109, '42258': 110, '42264': 111, '42278': 112, '42289': 112, '42586': 113, '42594': 114, '54608': 115, '54615': 115, '54777': 116, '67371': 117, '67374': 117, '67386': 117, '67457': 117, '67482': 117, '67517': 117, '67539': 117, '42658': 118, '55477': 119, '56868': 120, '69125': 121, '69226': 121, '57575': 122, '128664': 123, '58398': 124, '61792': 125, '68769': 126, '43548': 127, '106387': 128, '106398': 128, '106413': 128, '106461': 128, '106466': 128, '65935': 129, '65939': 129, '65949': 129, '65955': 129, '65960': 129, '61864': 79, '20515': 130, '159434': 131, '159437': 131, '159445': 131, '323166': 132, '323189': 132, '65979': 129, '60646': 133, '13597': 134, '66121': 135, '66173': 136, '129339': 137, '66251': 136, '129645': 137, '169457': 138, '169479': 138, '169495': 138, '66253': 136, '309819': 139, '309845': 139, '309849': 139, '66257': 136, '330590': 140, '66284': 136, '66310': 136, '109922': 141, '109976': 141, '110027': 141, '66853': 142, '118800': 143, '13656': 144, '169611': 145, '60659': 146, '67990': 147, '68607': 148, '196781': 149, '196785': 149, '68717': 148, '61750': 150, '32035': 151, '32037': 151, '73884': 152, '170879': 153, '69344': 154, '170934': 155, '141332': 156, '141336': 156, '144394': 157, '27455': 158, '27457': 158, '76188': 159, '76694': 160, '77596': 161, '120497': 162, '120499': 162, '120508': 162, '120562': 162, '79435': 163, '80596': 164, '80672': 165, '120980': 162, '80735': 166, '80894': 167, '38247': 168, '38260': 168, '38272': 168, '171131': 169, '319098': 170, '87237': 171, 
'27492': 172, '13796': 64, '13811': 64, '90626': 173, '13870': 64, '13879': 64, '93740': 174, '293248': 175, '171333': 176, '311252': 177, '311258': 177, '95163': 178, '275029': 179, '98464': 180, '145231': 181, '211755': 182, '211759': 182, '99040': 183, '99232': 184, '157810': 185, '157841': 185, '157842': 185, '102689': 186, '171479': 187, '171485': 188, '171495': 188, '13907': 189, '226017': 190, '104587': 191, '105927': 192, '105951': 193, '105965': 193, '106023': 194, '106690': 195, '171727': 196, '127020': 197, '127097': 197, '127151': 197, '127242': 197, '159483': 198, '159486': 198, '159490': 198, '159505': 198, '159516': 198, '159518': 198, '159535': 198, '159539': 198, '159541': 198, '330705': 199, '206694': 200, '206710': 200, '206728': 200, '206729': 200, '206767': 200, '206874': 200, '206927': 200, '207009': 200, '207042': 200, '207161': 200, '207167': 200, '207375': 200, '207441': 200, '207452': 200, '207470': 200, '207575': 200, '207589': 200, '108335': 201, '108581': 202, '108743': 203, '117166': 204, '120569': 162, '120745': 162, '32248': 205, '110432': 206, '66033': 207, '120786': 162, '203918': 208, '204178': 208, '204277': 208, '69391': 209, '121037': 162, '121088': 162, '122564': 210, '14044': 211, '305773': 212, '67619': 213, '67784': 213, '67786': 213, '21388': 214, '21391': 214, '32258': 215, '32272': 215, '32293': 215, '32316': 215, '125412': 216, '132325': 217, '129442': 137, '141508': 218, '130017': 219, '141856': 220, '201893': 221, '14198': 65, '14287': 65, '14294': 65, '14314': 65, '14353': 65, '14359': 65, '130657': 222, '14402': 65, '193515': 223, '131079': 222, '159581': 224, '159597': 224, '51237': 225, '42180': 226, '131128': 222, '212384': 227, '212395': 227, '142550': 228, '142564': 228, '142584': 228, '142646': 228, '142653': 228, '142661': 228, '131736': 222, '132107': 222, '198802': 229, '132113': 222, '142736': 230, '27569': 231, '27581': 231, '132476': 232, '132501': 232, '21718': 233, '66056': 135, '66070': 135, '66083': 
135, '66090': 135, '66108': 135, '66115': 135, '133181': 234, '66126': 135, '66143': 135, '133414': 235, '133595': 236, '194115': 237, '188837': 238, '132431': 239, '135479': 240, '173155': 241, '11438': 242, '143466': 243, '143496': 243, '32433': 244, '136761': 245, '137051': 246, '137122': 247, '173907': 248, '173912': 248, '173914': 248, '173972': 248, '174017': 248, '174029': 248, '174063': 248, '174066': 248, '174076': 248, '174087': 248, '174137': 248, '174138': 248, '174158': 248, '174163': 248, '174165': 248, '174170': 248, '174188': 248, '174207': 248, '174213': 248, '174233': 248, '174252': 248, '229370': 249, '204775': 250, '204777': 250, '295591': 251, '42220': 252, '137133': 247, '138662': 253, '140367': 254, '141068': 255, '142779': 256, '144643': 257, '148977': 258, '149264': 259, '157870': 260, '296317': 261, '148077': 262, '148080': 262, '148086': 262, '21973': 263, '307230': 264, '189532': 265, '149682': 266, '149919': 267, '150135': 268, '150179': 268, '150888': 269, '159625': 270, '159627': 270, '159629': 270, '159630': 270, '159637': 270, '159640': 270, '159647': 270, '151469': 271, '229955': 272, '66171': 136, '151593': 273, '66197': 136, '152143': 274, '166822': 275, '166869': 276, '66277': 136, '167549': 84, '66286': 136, '66297': 136, '66303': 136, '66306': 136, '167811': 94, '157894': 277, '157911': 277, '157917': 277, '4283': 278, '167813': 94, '186696': 279, '186704': 279, '186714': 279, '186746': 279, '186757': 279, '167921': 94, '168290': 280, '169293': 281, '169943': 282, '171057': 283, '171067': 283, '171452': 284, '194252': 285, '176381': 286, '177501': 287, '189086': 288, '189087': 288, '132735': 289, '177730': 287, '22118': 290, '177775': 287, '176453': 291, '176461': 291, '57572': 292, '178094': 293, '22180': 294, '180255': 295, '181966': 296, '181973': 296, '182092': 297, '182140': 297, '182246': 297, '311462': 298, '311513': 298, '182247': 297, '185302': 299, '133287': 300, '196487': 301, '185321': 299, '185430': 299, '186582': 
302, '187279': 303, '232112': 304, '310335': 305, '310355': 305, '187306': 303, '32768': 306, '187515': 307, '65211': 308, '187824': 309, '189172': 310, '202825': 311, '199580': 312, '199587': 312, '189549': 313, '199627': 312, '189669': 314, '192142': 315, '199742': 312, '193141': 316, '193600': 317, '199838': 312, '199938': 312, '193927': 318, '200031': 312, '194035': 318, '200087': 312, '194701': 319, '200202': 312, '200204': 312, '200215': 312, '200228': 312, '195246': 320, '200319': 312, '200320': 312, '200338': 312, '200402': 312, '274102': 321, '274108': 321, '274114': 321, '274118': 321, '311546': 322, '311557': 322, '311563': 322, '311565': 322, '195454': 323, '195768': 324, '196866': 325, '196912': 326, '196966': 327, '212412': 328, '27760': 329, '27803': 330, '198260': 331, '198307': 331, '200638': 332, '200640': 332, '200660': 332, '200676': 332, '200679': 332, '200692': 332, '200704': 332, '198315': 331, '198341': 331, '198363': 331, '208641': 333, '208673': 333, '208724': 333, '208788': 333, '208865': 333, '208949': 333, '208956': 333, '208971': 333, '208994': 333, '209006': 333, '209078': 334, '158323': 335, '158341': 335, '211519': 336, '211059': 337, '211075': 337, '198419': 338, '274257': 339, '198471': 340, '198555': 341, '198557': 341, '158534': 342, '158542': 342, '158592': 342, '158596': 342, '158612': 342, '158629': 342, '158653': 342, '158668': 342, '158728': 342, '158736': 342, '158784': 342, '158793': 342, '158816': 342, '158833': 342, '158866': 342, '158884': 342, '158894': 342, '198819': 343, '119986': 344, '198971': 345, '29686': 346, '178589': 347, '178591': 347, '178637': 347, '178654': 347, '199099': 345, '211958': 348, '212012': 348, '199111': 345, '212120': 348, '14741': 349, '14766': 349, '194716': 350, '199158': 351, '199197': 352, '235182': 353, '279396': 354, '23429': 355, '179451': 356, '66426': 357, '66436': 357, '179495': 358, '179511': 358, '179512': 358, '199220': 359, '326051': 360, '179616': 361, '179643': 362, '199591': 
312, '199672': 312, '199728': 312, '199744': 312, '199810': 312, '200006': 312, '27989': 363, '28014': 363, '28037': 363, '28066': 363, '28131': 363, '28143': 363, '28154': 363, '28203': 363, '28225': 363, '28235': 363, '200075': 312, '200119': 312, '200249': 312, '200712': 364, '200877': 365, '315771': 366, '200915': 367, '201073': 368, '201318': 369, '201384': 370, '202049': 371, '212087': 348, '212378': 227, '212920': 372, '280698': 373, '68010': 374, '213809': 375, '236883': 376, '237053': 376, '213831': 375, '180253': 295, '213841': 375, '249138': 377, '120031': 378, '120045': 378, '274693': 379, '213850': 375, '299768': 380, '213864': 375, '324330': 381, '324340': 381, '324344': 381, '324353': 381, '324356': 381, '311974': 382, '312000': 382, '312005': 382, '312024': 382, '312054': 382, '312118': 382, '312121': 382, '312135': 382, '312150': 382, '312158': 382, '312172': 383, '312184': 383, '237937': 384, '213866': 375, '54503': 385, '54525': 385, '54543': 385, '54566': 385, '54580': 385, '54590': 385, '54594': 385, '54595': 385, '54597': 385, '310653': 386, '204432': 387, '180446': 388, '14939': 389, '209327': 390, '209328': 390, '248199': 391, '116853': 392, '280745': 393, '280887': 393, '180631': 394, '248283': 395, '248325': 395, '195192': 320, '195201': 320, '248357': 395, '212199': 396, '212213': 396, '301929': 397, '283776': 398, '331212': 399, '331234': 399, '284430': 400, '248374': 395, '159248': 401, '248392': 395, '248478': 402, '326060': 403, '326108': 403, '326115': 403, '326175': 403, '250534': 404, '251291': 404, '251585': 405, '279552': 406, '251624': 407, '252665': 408, '252779': 408, '307446': 409, '252811': 408, '253046': 408, '306775': 410, '133604': 411, '308130': 412, '133607': 413, '133617': 414, '133630': 414, '312237': 415, '30431': 416, '313517': 417, '253695': 418, '253836': 418, '254000': 418, '254111': 418, '254180': 418, '256363': 419, '257336': 420, '315398': 421, '315564': 421, '315584': 421, '315597': 421, '315603': 421, 
'315651': 421, '270090': 422, '270961': 423, '271764': 424, '271781': 424, '275255': 425, '276624': 426, '285297': 427, '291847': 428, '159284': 429, '292111': 430, '315831': 431, '292732': 432, '293559': 433, '315862': 434, '315864': 434, '315866': 434, '315877': 434, '315899': 434, '121825': 435, '121836': 435, '121841': 435, '121854': 435, '316145': 436, '33309': 437, '294700': 438, '297360': 439, '298602': 440, '317986': 441, '298670': 440, '301640': 442, '302476': 443, '302787': 444, '302866': 444, '303214': 445, '305648': 446, '305857': 447, '306879': 82, '306921': 82, '308678': 448, '309983': 449, '310373': 305, '310554': 450, '182826': 451, '12910': 452, '310768': 453, '310829': 453, '195628': 454, '195663': 454, '195688': 454, '310914': 455, '187390': 307, '313568': 456, '182907': 457, '319391': 458, '183060': 459, '183061': 459, '183069': 459, '183113': 459, '183144': 459, '183176': 459, '183194': 459, '316306': 460, '33348': 461, '68035': 462, '316519': 463, '126895': 464, '28904': 465, '42532': 466, '24557': 467, '317044': 468, '183297': 469, '183298': 469, '183304': 469, '183305': 469, '183318': 469, '317444': 470, '310933': 455, '317531': 470, '12922': 471, '12924': 471, '117064': 472, '183350': 473, '317643': 474, '320058': 475, '320118': 475, '320168': 475, '68821': 476, '248950': 477, '317709': 478, '317765': 441, '321160': 479, '321261': 479, '321303': 479, '321529': 479, '195752': 324, '317809': 441, '195770': 324, '12932': 480, '12937': 480, '318110': 441, '52657': 481, '52973': 481, '318253': 441, '127007': 482, '318313': 441, '311021': 483, '68084': 484, '68097': 484, '68104': 484, '60309': 485, '202107': 486, '202118': 486, '202134': 486, '143674': 487, '143676': 487, '143697': 487, '143706': 487, '143710': 487, '143731': 487, '143749': 487, '143755': 487, '143758': 487, '143780': 487, '143783': 487, '242361': 488, '15089': 77, '318639': 441, '320188': 489, '204499': 490, '318675': 441, '318743': 441, '318807': 441, '318869': 441, '324133': 
491, '15119': 492, '15120': 492, '15135': 492, '15140': 492, '15147': 492, '325055': 493, '29013': 494, '325220': 495, '325552': 496, '325562': 496, '184434': 497, '144341': 498, '144357': 498, '144377': 498, '144384': 498, '156069': 499, '281000': 500, '281017': 500, '327875': 501, '201444': 502, '184849': 503, '311053': 504, '325605': 505, '325648': 506, '15318': 507, '15330': 507, '15336': 507, '15355': 507, '15382': 507, '15409': 507, '306466': 508, '306577': 508, '325681': 506, '185128': 509, '185133': 509}
| 7,015
| 14,503
| 0.572024
| 4,002
| 28,060
| 4.010745
| 0.377561
| 0.000872
| 0.00162
| 0.001744
| 0.004237
| 0.004237
| 0.004237
| 0.004237
| 0.004237
| 0
| 0
| 0.666805
| 0.14273
| 28,060
| 3
| 14,504
| 9,353.333333
| 0.000457
| 0
| 0
| 0
| 0
| 0
| 0.405987
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
3b1346bf542a1609b56ff63aa2665d437c55adfd
| 128
|
py
|
Python
|
app/views/posts.py
|
lswshiwei/flask
|
12e13317d1fa5aae596fb52f8e7219bd1cc4b663
|
[
"Apache-2.0"
] | null | null | null |
app/views/posts.py
|
lswshiwei/flask
|
12e13317d1fa5aae596fb52f8e7219bd1cc4b663
|
[
"Apache-2.0"
] | null | null | null |
app/views/posts.py
|
lswshiwei/flask
|
12e13317d1fa5aae596fb52f8e7219bd1cc4b663
|
[
"Apache-2.0"
] | null | null | null |
from flask import Blueprint

# Blueprint grouping the post-related views for later app registration.
posts = Blueprint('posts', __name__)


def send_posts():
    """Handle GET /posts/ by returning a success message ("reply successful")."""
    return '回复成功'


# Registering explicitly is equivalent to decorating with @posts.route('/posts/');
# the endpoint name is still derived from the view function's name.
posts.add_url_rule('/posts/', view_func=send_posts)
| 14.222222
| 36
| 0.695313
| 16
| 128
| 5.25
| 0.6875
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15625
| 128
| 8
| 37
| 16
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0.2
| 0.6
| 0.4
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
3b416929db4fcc38556b1a5e59fcff3c49e5f525
| 54
|
py
|
Python
|
codingbat.com/List-1/make_ends.py
|
ahmedelq/PythonicAlgorithms
|
ce10dbb6e1fd0ea5c922a932b0f920236aa411bf
|
[
"MIT"
] | null | null | null |
codingbat.com/List-1/make_ends.py
|
ahmedelq/PythonicAlgorithms
|
ce10dbb6e1fd0ea5c922a932b0f920236aa411bf
|
[
"MIT"
] | null | null | null |
codingbat.com/List-1/make_ends.py
|
ahmedelq/PythonicAlgorithms
|
ce10dbb6e1fd0ea5c922a932b0f920236aa411bf
|
[
"MIT"
] | null | null | null |
def make_ends(nums):
    """Return a two-element list holding the first and last items of *nums*.

    For a single-element list both entries are that same element.
    """
    first, last = nums[0], nums[-1]
    return [first, last]
| 18
| 32
| 0.592593
| 9
| 54
| 3.444444
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046512
| 0.203704
| 54
| 2
| 33
| 27
| 0.674419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
3b6ea9b1d84ff350b433d5ff4e080f15bd26706b
| 74
|
py
|
Python
|
CrossPy/__init__.py
|
UBC-MDS/CrossPy
|
650778699f1adbd2719483f4e4866117fbe1bb4c
|
[
"MIT"
] | 1
|
2018-02-14T05:37:52.000Z
|
2018-02-14T05:37:52.000Z
|
CrossPy/__init__.py
|
UBC-MDS/CrossPy
|
650778699f1adbd2719483f4e4866117fbe1bb4c
|
[
"MIT"
] | 9
|
2018-02-14T19:57:40.000Z
|
2018-03-23T04:51:25.000Z
|
CrossPy/__init__.py
|
UBC-MDS/CrossPy
|
650778699f1adbd2719483f4e4866117fbe1bb4c
|
[
"MIT"
] | 4
|
2018-02-09T00:13:00.000Z
|
2020-08-06T00:20:29.000Z
|
from CrossPy.CrossPy import train_test_split, cross_validation, summary_cv
| 74
| 74
| 0.891892
| 11
| 74
| 5.636364
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.067568
| 74
| 1
| 74
| 74
| 0.898551
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8e933a924028e9206b2ca015867c15e7b8d058a9
| 136
|
py
|
Python
|
app/prediction_model.py
|
pdago/euvsvirus-bluebeds-prediction-api
|
829ee1d690495205ce7aa7570a64824145a9e01f
|
[
"MIT"
] | null | null | null |
app/prediction_model.py
|
pdago/euvsvirus-bluebeds-prediction-api
|
829ee1d690495205ce7aa7570a64824145a9e01f
|
[
"MIT"
] | null | null | null |
app/prediction_model.py
|
pdago/euvsvirus-bluebeds-prediction-api
|
829ee1d690495205ce7aa7570a64824145a9e01f
|
[
"MIT"
] | null | null | null |
from abc import ABC


class PredictionModel(ABC):
    """Abstract interface for prediction models.

    Concrete models are expected to override both methods; the
    implementations here are no-op stubs that return None.
    """

    def predict(self, x):
        """Produce a prediction for input ``x`` (stub: returns None)."""
        pass

    def prediction_fields(self):
        """Describe the fields of a prediction (stub: returns None)."""
        pass
| 13.6
| 32
| 0.632353
| 17
| 136
| 5
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 136
| 9
| 33
| 15.111111
| 0.885417
| 0
| 0
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0.333333
| 0.166667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
8eac1cc2a7deaacc5e7ce2dfb3ee8066bb6be348
| 43
|
py
|
Python
|
problem/10000~19999/16478/16478.py3.py
|
njw1204/BOJ-AC
|
1de41685725ae4657a7ff94e413febd97a888567
|
[
"MIT"
] | 1
|
2019-04-19T16:37:44.000Z
|
2019-04-19T16:37:44.000Z
|
problem/10000~19999/16478/16478.py3.py
|
njw1204/BOJ-AC
|
1de41685725ae4657a7ff94e413febd97a888567
|
[
"MIT"
] | 1
|
2019-04-20T11:42:44.000Z
|
2019-04-20T11:42:44.000Z
|
problem/10000~19999/16478/16478.py3.py
|
njw1204/BOJ-AC
|
1de41685725ae4657a7ff94e413febd97a888567
|
[
"MIT"
] | 3
|
2019-04-19T16:37:47.000Z
|
2021-10-25T00:45:00.000Z
|
# Read three integers a, b, c from one whitespace-separated input line.
a, b, c = (int(token) for token in input().split())
# Print a*c/b using true (float) division.
print(a * c / b)
| 21.5
| 30
| 0.627907
| 11
| 43
| 2.454545
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023256
| 43
| 2
| 31
| 21.5
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
8ecc714be412f7334d629fca80c5beaa6b077fe5
| 192
|
py
|
Python
|
openprocurement/tender/twostage/includeme.py
|
leits/openprocurement.tender.twostage
|
2cacf77364bf7ebf74fedf6ddabc8ac600b6d73f
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/tender/twostage/includeme.py
|
leits/openprocurement.tender.twostage
|
2cacf77364bf7ebf74fedf6ddabc8ac600b6d73f
|
[
"Apache-2.0"
] | 2
|
2021-03-26T00:35:15.000Z
|
2022-03-21T22:21:08.000Z
|
openprocurement/tender/twostage/includeme.py
|
leits/openprocurement.tender.twostage
|
2cacf77364bf7ebf74fedf6ddabc8ac600b6d73f
|
[
"Apache-2.0"
] | null | null | null |
from openprocurement.tender.twostage.models import Tender
def includeme(config):
    """Pyramid include hook for the two-stage tender package.

    Registers the `Tender` model under its procurementMethodType and
    scans the package's view modules for view configuration.
    """
    config.add_tender_procurementMethodType(Tender)
    config.scan("openprocurement.tender.twostage.views")
| 27.428571
| 57
| 0.817708
| 21
| 192
| 7.380952
| 0.619048
| 0.270968
| 0.374194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09375
| 192
| 6
| 58
| 32
| 0.890805
| 0
| 0
| 0
| 0
| 0
| 0.192708
| 0.192708
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8ee99da55ed28860734bd581765ec938a62f7d23
| 30,816
|
py
|
Python
|
fred/series.py
|
areed1192/us-federal-reserve-python-api
|
bf20e54275c16cfdac7b9fab611163ef1955772d
|
[
"MIT"
] | 5
|
2021-03-23T14:21:37.000Z
|
2021-04-11T17:29:03.000Z
|
fred/series.py
|
areed1192/us-federal-reserve-python-api
|
bf20e54275c16cfdac7b9fab611163ef1955772d
|
[
"MIT"
] | null | null | null |
fred/series.py
|
areed1192/us-federal-reserve-python-api
|
bf20e54275c16cfdac7b9fab611163ef1955772d
|
[
"MIT"
] | 3
|
2021-04-27T23:57:23.000Z
|
2022-01-16T14:59:38.000Z
|
from typing import Dict
from typing import List
from typing import Union
from datetime import datetime
from fred.session import FredSession
# Today's date as an ISO-8601 'YYYY-MM-DD' string, captured once at import
# time; used as the default realtime_start/realtime_end for most requests.
todays_date = datetime.today().date().isoformat()
class Series():
    """
    ## Overview:
    ----
    The Federal Reserve offers a wide variety of data including
    series data. Series Data is used for different types of analysis
    in economics. This Service will help you capture different series
    data and metadata associated with it.
    """
def __init__(self, session: FredSession) -> None:
    """Initialize the `Series` service.

    Args:
        session: An already-initialized `FredSession` used to make all
            HTTP requests for this service.

    Usage:
        >>> fred_client = FederalReserveClient(api_key='xxxxxx')
        >>> series_service = fred_client.series()
    """
    # Keep a handle on the shared session object.
    self.fred_session: FredSession = session

    # All series requests hang off this endpoint prefix.
    self.endpoint = '/series'
    self._todays_date = datetime.today().date().isoformat()
def __repr__(self) -> str:
"""String representation of the `FederalReserveClient.Series` object."""
# define the string representation
str_representation = '<FederalReserveClient.Series (active=True, connected=True)>'
return str_representation
def get_series(
    self,
    series_id: str,
    realtime_start: Union[str, datetime] = todays_date,
    realtime_end: Union[str, datetime] = todays_date
) -> Dict:
    """Fetch an economic data series.

    Args:
        series_id: The series ID to query.
        realtime_start: Start of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        realtime_end: End of the real-time period, YYYY-MM-DD.
            Defaults to today's date.

    Returns:
        Dict: A collection of `Series` resources.

    Usage:
        >>> series_service.get_series(series_id='GNPCA')
    """
    request_params = {
        'series_id': series_id,
        'api_key': self.fred_session.client._api_key,
        'file_type': 'json',
        'realtime_start': realtime_start,
        'realtime_end': realtime_end
    }

    return self.fred_session.make_request(
        method='get',
        endpoint=self.endpoint,
        params=request_params
    )
def get_series_categories(
    self,
    series_id: str,
    realtime_start: Union[str, datetime] = todays_date,
    realtime_end: Union[str, datetime] = todays_date
) -> Dict:
    """Fetch the categories for an economic data series.

    Args:
        series_id: The series ID to query.
        realtime_start: Start of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        realtime_end: End of the real-time period, YYYY-MM-DD.
            Defaults to today's date.

    Returns:
        Dict: A collection of `Series` resources.

    Usage:
        >>> series_service.get_series_categories(series_id='EXJPUS')
    """
    request_params = {
        'series_id': series_id,
        'api_key': self.fred_session.client._api_key,
        'file_type': 'json',
        'realtime_start': realtime_start,
        'realtime_end': realtime_end
    }

    return self.fred_session.make_request(
        method='get',
        endpoint=self.endpoint + '/categories',
        params=request_params
    )
def get_series_observations(
    self,
    series_id: str,
    realtime_start: Union[str, datetime] = todays_date,
    realtime_end: Union[str, datetime] = todays_date,
    offset: int = 0,
    limit: int = 1000,
    sort_order: str = 'asc',
    observation_start: Union[str, datetime] = '1776-07-04',
    observation_end: Union[str, datetime] = '9999-12-31',
    units: str = 'lin',
    frequency: str = None,
    aggregation_method: str = 'avg',
    output_type: int = 1,
    vintage_dates: Union[List[str], List[datetime]] = None
) -> Dict:
    """Get the observations or data values for an economic data series.

    ### Parameters
    ----------
    series_id : str
        The series ID you want to query.
    realtime_start : Union[str, datetime] (optional, Default=today's date)
        The start of the real-time period. YYYY-MM-DD formatted string.
    realtime_end : Union[str, datetime] (optional, Default=today's date)
        The end of the real-time period. YYYY-MM-DD formatted string.
    observation_start : Union[str, datetime] (optional, Default='1776-07-04')
        The start of the observation period. YYYY-MM-DD formatted string.
    observation_end : Union[str, datetime] (optional, Default='9999-12-31')
        The end of the observation period. YYYY-MM-DD formatted string.
    offset : int (optional, Default=0)
        Non-negative integer.
    limit : int (optional, Default=1000)
        The maximum number of results to return, between 1 and 1000.
    sort_order : str (optional, Default='asc')
        Sort order for results: one of ['asc', 'desc'].
    units : str (optional, Default='lin')
        Data value transformation. One of ['lin', 'chg', 'ch1', 'pch',
        'pc1', 'pca', 'cch', 'cca', 'log'].
    frequency : str (optional, Default=None)
        Optional lower frequency to aggregate values to, e.g. ['d', 'w',
        'bw', 'm', 'q', 'sa', 'a', ...]; see the FRED docs for the full
        list.
    aggregation_method : str (optional, Default='avg')
        Aggregation method used for frequency aggregation. Has no effect
        if `frequency` is not set. One of ['avg', 'sum', 'eop'].
    output_type : int (optional, Default=1)
        1 = Observations by Real-Time Period,
        2 = Observations by Vintage Date, All Observations,
        3 = Observations by Vintage Date, New and Revised Only,
        4 = Observations, Initial Release Only.
    vintage_dates : Union[List[str], List[datetime]] (optional, Default=None)
        Dates in history at which to download the data as it existed;
        may be specified instead of a real-time period.

    ### Returns
    -------
    Dict
        A collection of `Series` resources.

    ### Usage
    ----
        >>> fred_client = FederalReserveClient(api_key='xxxxxx')
        >>> series_service = fred_client.series()
        >>> series_service.get_series_observations(series_id='GNPCA')
    """
    # Convert the vintage dates to a single comma-separated string of
    # YYYY-MM-DD values, accepting datetimes or pre-formatted strings.
    if vintage_dates:
        iso_dates = []
        for date in vintage_dates:
            if isinstance(date, datetime):
                iso_dates.append(date.date().isoformat())
            else:
                # BUGFIX: previously `iso_dates.append(iso_dates)` appended
                # the list to itself, corrupting the payload for string
                # dates.
                iso_dates.append(date)

        # BUGFIX: join the normalized ISO strings; joining the original
        # input raised a TypeError whenever it contained datetimes.
        vintage_dates = ','.join(iso_dates)

    content = self.fred_session.make_request(
        method='get',
        endpoint=self.endpoint + '/observations',
        params={
            'series_id': series_id,
            'api_key': self.fred_session.client._api_key,
            'file_type': 'json',
            'realtime_start': realtime_start,
            'realtime_end': realtime_end,
            'observation_start': observation_start,
            'observation_end': observation_end,
            'offset': offset,
            'limit': limit,
            'sort_order': sort_order,
            'units': units,
            'frequency': frequency,
            'aggregation_method': aggregation_method,
            'output_type': output_type,
            'vintage_dates': vintage_dates
        }
    )

    return content
def get_series_release(
    self,
    series_id: str,
    realtime_start: Union[str, datetime] = todays_date,
    realtime_end: Union[str, datetime] = todays_date
) -> Dict:
    """Fetch the release for an economic data series.

    Args:
        series_id: The series ID to query.
        realtime_start: Start of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        realtime_end: End of the real-time period, YYYY-MM-DD.
            Defaults to today's date.

    Returns:
        Dict: A collection of `Series` resources.

    Usage:
        >>> series_service.get_series_release(series_id='IRA')
    """
    request_params = {
        'series_id': series_id,
        'api_key': self.fred_session.client._api_key,
        'file_type': 'json',
        'realtime_start': realtime_start,
        'realtime_end': realtime_end
    }

    return self.fred_session.make_request(
        method='get',
        endpoint=self.endpoint + '/release',
        params=request_params
    )
def series_search(
    self,
    search_text: str,
    realtime_start: Union[str, datetime] = todays_date,
    realtime_end: Union[str, datetime] = todays_date,
    offset: int = 0,
    limit: int = 1000,
    sort_order: str = 'asc',
    order_by: str = 'series_id',
    search_type: str = 'full_text',
    filter_variable: str = None,
    filter_value: str = None,
    tag_names: List[str] = None,
    exclude_tag_names: List[str] = None
) -> Dict:
    """Fetch economic data series that match search text.

    Args:
        search_text: The words to match against economic data series.
        realtime_start: Start of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        realtime_end: End of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        offset: Non-negative integer. Defaults to 0.
        limit: Maximum results to return, 1-1000. Defaults to 1000.
        sort_order: One of ['asc', 'desc']. Defaults to 'asc'.
        order_by: Attribute to order results by, e.g. 'search_rank',
            'series_id', 'title', 'popularity'. Defaults to 'series_id'.
        search_type: 'full_text' (title/units/frequency/tags stems) or
            'series_id' (substring match on IDs). Defaults to 'full_text'.
        filter_variable: Attribute to filter by: one of ['frequency',
            'units', 'seasonal_adjustment']. Defaults to None.
        filter_value: Value of `filter_variable` to filter results by.
        tag_names: Tag names the series must match all of.
        exclude_tag_names: Tag names the series must match none of;
            requires `tag_names` to also be set.

    Returns:
        Dict: A collection of `Series` resources.

    Usage:
        >>> series_service.series_search(search_text='Monetary Service Index')
    """
    # Key order mirrors the original request payload.
    request_params = {
        'search_text': search_text,
        'search_type': search_type,
        'api_key': self.fred_session.client._api_key,
        'file_type': 'json',
        'realtime_start': realtime_start,
        'realtime_end': realtime_end,
        'offset': offset,
        'limit': limit,
        'sort_order': sort_order,
        'order_by': order_by,
        'filter_variable': filter_variable,
        'filter_value': filter_value,
        'tag_names': tag_names,
        'exclude_tag_names': exclude_tag_names,
    }

    return self.fred_session.make_request(
        method='get',
        endpoint=self.endpoint + '/search',
        params=request_params
    )
def series_tag_search(
    self,
    series_search_text: str,
    realtime_start: Union[str, datetime] = todays_date,
    realtime_end: Union[str, datetime] = todays_date,
    offset: int = 0,
    limit: int = 1000,
    sort_order: str = 'asc',
    order_by: str = 'series_count',
    tag_group_id: str = None,
    tag_search_text: str = None,
    tag_names: List[str] = None,
) -> Dict:
    """Fetch the FRED tags for a series search.

    Optionally filters results by tag name, tag group, or tag search.

    Args:
        series_search_text: The words to match against economic data series.
        realtime_start: Start of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        realtime_end: End of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        offset: Non-negative integer. Defaults to 0.
        limit: Maximum results to return, 1-1000. Defaults to 1000.
        sort_order: One of ['asc', 'desc']. Defaults to 'asc'.
        order_by: One of ['series_count', 'popularity', 'created',
            'name', 'group_id']. Defaults to 'series_count'.
        tag_group_id: Tag group id to filter tags by type, one of
            ['freq', 'gen', 'geo', 'geot', 'rls', 'seas', 'src'].
        tag_search_text: The words to find matching tags with.
        tag_names: Tag names the series must match all of.

    Returns:
        Dict: A collection of `Series` resources.

    Usage:
        >>> series_service.series_tag_search(series_search_text='Monetary Service Index')
    """
    # Key order mirrors the original request payload.
    request_params = {
        'series_search_text': series_search_text,
        'api_key': self.fred_session.client._api_key,
        'file_type': 'json',
        'realtime_start': realtime_start,
        'realtime_end': realtime_end,
        'offset': offset,
        'limit': limit,
        'sort_order': sort_order,
        'order_by': order_by,
        'tag_group_id': tag_group_id,
        'tag_search_text': tag_search_text,
        'tag_names': tag_names,
    }

    return self.fred_session.make_request(
        method='get',
        endpoint=self.endpoint + '/search/tags',
        params=request_params
    )
def series_releated_tags_search(
    self,
    series_search_text: str,
    realtime_start: Union[str, datetime] = todays_date,
    realtime_end: Union[str, datetime] = todays_date,
    offset: int = 0,
    limit: int = 1000,
    sort_order: str = 'asc',
    order_by: str = 'series_count',
    tag_group_id: str = None,
    tag_search_text: str = None,
    tag_names: List[str] = None,
    exclude_tag_names: List[str] = None,
) -> Dict:
    """Fetch the related FRED tags for a series search.

    Optionally filters results by tag name, tag group, or tag search.
    NOTE(review): the method name contains a typo ('releated'); it is
    kept as-is because it is the public interface callers rely on.

    Args:
        series_search_text: The words to match against economic data series.
        realtime_start: Start of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        realtime_end: End of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        offset: Non-negative integer. Defaults to 0.
        limit: Maximum results to return, 1-1000. Defaults to 1000.
        sort_order: One of ['asc', 'desc']. Defaults to 'asc'.
        order_by: One of ['series_count', 'popularity', 'created',
            'name', 'group_id']. Defaults to 'series_count'.
        tag_group_id: Tag group id to filter tags by type, one of
            ['freq', 'gen', 'geo', 'geot', 'rls', 'seas', 'src'].
        tag_search_text: The words to find matching tags with.
        tag_names: Tag names the series must match all of.
        exclude_tag_names: Tag names the series must match none of;
            requires `tag_names` to also be set.

    Returns:
        Dict: A collection of `Series` resources.

    Usage:
        >>> series_service.series_releated_tags_search(
            series_search_text='Mortgage Rates',
            tag_names=['30-year', 'frb']
        )
    """
    # Key order mirrors the original request payload.
    request_params = {
        'series_search_text': series_search_text,
        'api_key': self.fred_session.client._api_key,
        'file_type': 'json',
        'realtime_start': realtime_start,
        'realtime_end': realtime_end,
        'offset': offset,
        'limit': limit,
        'sort_order': sort_order,
        'order_by': order_by,
        'tag_group_id': tag_group_id,
        'tag_search_text': tag_search_text,
        'tag_names': tag_names,
        'exclude_tag_names': exclude_tag_names
    }

    return self.fred_session.make_request(
        method='get',
        endpoint=self.endpoint + '/search/related_tags',
        params=request_params
    )
def get_series_tags(
    self,
    series_id: str,
    realtime_start: Union[str, datetime] = todays_date,
    realtime_end: Union[str, datetime] = todays_date,
    sort_order: str = 'asc',
    order_by: str = 'series_count',
) -> Dict:
    """Fetch the FRED tags for a series.

    Args:
        series_id: The ID for a series.
        realtime_start: Start of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        realtime_end: End of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        sort_order: One of ['asc', 'desc']. Defaults to 'asc'.
        order_by: One of ['series_count', 'popularity', 'created',
            'name', 'group_id']. Defaults to 'series_count'.

    Returns:
        Dict: A collection of `Series.Tags` resources.

    Usage:
        >>> series_service.get_series_tags(series_id='STLFSI')
    """
    request_params = {
        'series_id': series_id,
        'api_key': self.fred_session.client._api_key,
        'file_type': 'json',
        'realtime_start': realtime_start,
        'realtime_end': realtime_end,
        'sort_order': sort_order,
        'order_by': order_by,
    }

    return self.fred_session.make_request(
        method='get',
        endpoint=self.endpoint + '/tags',
        params=request_params
    )
def get_series_updates(
    self,
    realtime_start: Union[str, datetime] = todays_date,
    realtime_end: Union[str, datetime] = todays_date,
    offset: int = 0,
    limit: int = 1000,
    filter_value: str = 'all',
    start_time: str = None,
    end_time: str = None
) -> Dict:
    """Fetch series sorted by when observations were last updated.

    Results are limited by the service to series updated within the
    last two weeks.

    Args:
        realtime_start: Start of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        realtime_end: End of the real-time period, YYYY-MM-DD.
            Defaults to today's date.
        offset: Non-negative integer. Defaults to 0.
        limit: Maximum results to return, 1-1000. Defaults to 1000.
        filter_value: Geographic type filter: 'macro', 'regional', or
            'all'. Defaults to 'all'.
        start_time: Range start, YYYYMMDDHhmm formatted string;
            `end_time` is required if this is set.
        end_time: Range end, YYYYMMDDHhmm formatted string;
            `start_time` is required if this is set.

    Returns:
        Dict: A collection of `Series` resources.

    Usage:
        >>> series_service.get_series_updates()
    """
    request_params = {
        'api_key': self.fred_session.client._api_key,
        'file_type': 'json',
        'realtime_start': realtime_start,
        'realtime_end': realtime_end,
        'offset': offset,
        'limit': limit,
        'filter_value': filter_value,
        'start_time': start_time,
        'end_time': end_time
    }

    return self.fred_session.make_request(
        method='get',
        endpoint=self.endpoint + '/updates',
        params=request_params
    )
def get_series_vintage_dates(
    self,
    series_id: str,
    realtime_start: Union[str, datetime] = '1776-07-04',
    realtime_end: Union[str, datetime] = '9999-12-31',
    offset: int = 0,
    limit: int = 1000,
    sort_order: str = 'asc',
) -> Dict:
    """Fetch the dates in history when a series' data values were
    revised or new data values were released.

    Vintage dates are the release dates for a series, excluding release
    dates when the data for the series did not change.

    Args:
        series_id: The ID for a series.
        realtime_start: Start of the real-time period, YYYY-MM-DD.
            Defaults to '1776-07-04'.
        realtime_end: End of the real-time period, YYYY-MM-DD.
            Defaults to '9999-12-31'.
        offset: Non-negative integer. Defaults to 0.
        limit: Maximum results to return, 1-1000. Defaults to 1000.
        sort_order: One of ['asc', 'desc']. Defaults to 'asc'.

    Returns:
        Dict: A collection of `Series` resources.

    Usage:
        >>> series_service.get_series_vintage_dates(series_id='GNPCA')
    """
    request_params = {
        'series_id': series_id,
        'api_key': self.fred_session.client._api_key,
        'file_type': 'json',
        'realtime_start': realtime_start,
        'realtime_end': realtime_end,
        'offset': offset,
        'limit': limit,
        'sort_order': sort_order,
    }

    return self.fred_session.make_request(
        method='get',
        endpoint=self.endpoint + '/vintagedates',
        params=request_params
    )
| 37.352727
| 107
| 0.576778
| 3,479
| 30,816
| 4.953147
| 0.091693
| 0.060933
| 0.046425
| 0.02861
| 0.781859
| 0.755745
| 0.746692
| 0.734331
| 0.73085
| 0.723712
| 0
| 0.011157
| 0.325188
| 30,816
| 824
| 108
| 37.398058
| 0.817456
| 0.553122
| 0
| 0.670139
| 0
| 0
| 0.121781
| 0.002613
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.017361
| 0
| 0.100694
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
8eeff503d02014ef30d85f3892ddaa61fd6111d0
| 74
|
py
|
Python
|
wiggler/__init__.py
|
jlowe77/Eris-Cogs
|
2ade8f82db3477527af3cff3b48ebb281e1a6987
|
[
"Apache-2.0"
] | 6
|
2020-05-13T20:43:53.000Z
|
2021-06-23T16:10:13.000Z
|
wiggler/__init__.py
|
jlowe77/Eris-Cogs
|
2ade8f82db3477527af3cff3b48ebb281e1a6987
|
[
"Apache-2.0"
] | 12
|
2019-04-02T13:29:10.000Z
|
2020-03-27T18:07:16.000Z
|
wiggler/__init__.py
|
jlowe77/Eris-Cogs
|
2ade8f82db3477527af3cff3b48ebb281e1a6987
|
[
"Apache-2.0"
] | 9
|
2020-06-07T21:46:54.000Z
|
2022-03-01T22:49:02.000Z
|
from .wiggly import Wiggle
def setup(bot):
    """Cog entry point: construct the Wiggle cog and attach it to *bot*."""
    cog = Wiggle(bot)
    bot.add_cog(cog)
| 12.333333
| 28
| 0.702703
| 12
| 74
| 4.25
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175676
| 74
| 5
| 29
| 14.8
| 0.836066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d9016d106cb1642114db2eaa685ba44ec7b1ff1d
| 35
|
py
|
Python
|
whatever.py
|
ktbyers/pynet_test
|
2b047910043bfa3725a67fd3dbd5c4bf223afa26
|
[
"Apache-2.0"
] | null | null | null |
whatever.py
|
ktbyers/pynet_test
|
2b047910043bfa3725a67fd3dbd5c4bf223afa26
|
[
"Apache-2.0"
] | 1
|
2018-02-27T22:40:39.000Z
|
2018-03-10T19:19:29.000Z
|
whatever.py
|
ktbyers/pynet_test
|
2b047910043bfa3725a67fd3dbd5c4bf223afa26
|
[
"Apache-2.0"
] | null | null | null |
# Modernized from Python 2 print statements, which are a SyntaxError on
# Python 3; single-argument print() produces identical output on both.
print("whatever")
print("whatever2")
| 11.666667
| 17
| 0.771429
| 4
| 35
| 6.75
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.032258
| 0.114286
| 35
| 2
| 18
| 17.5
| 0.83871
| 0
| 0
| 0
| 0
| 0
| 0.485714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
d93bb83226fc36143b62690be847c2c5143adf5e
| 121
|
py
|
Python
|
payment/admin.py
|
Nezale/Cloud_Warehouse_Project
|
505dd073862b9f2e49ae76eea255dc7a38175f79
|
[
"MIT"
] | 2
|
2018-12-05T00:17:35.000Z
|
2018-12-05T00:17:41.000Z
|
payment/admin.py
|
Nezale/Cloud_Warehouse_Project
|
505dd073862b9f2e49ae76eea255dc7a38175f79
|
[
"MIT"
] | 12
|
2019-01-14T22:57:23.000Z
|
2022-03-11T23:38:37.000Z
|
payment/admin.py
|
Nezale/Cloud_Warehouse_Project
|
505dd073862b9f2e49ae76eea255dc7a38175f79
|
[
"MIT"
] | 3
|
2019-01-24T11:32:09.000Z
|
2019-08-26T11:30:31.000Z
|
from django.contrib import admin
from .models import Payment
# Expose the Payment model in the Django admin using the default ModelAdmin.
admin.site.register(Payment)
# Register your models here.
| 17.285714
| 32
| 0.801653
| 17
| 121
| 5.705882
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132231
| 121
| 6
| 33
| 20.166667
| 0.92381
| 0.214876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d9821fc2aed42202abdafbb6e6ecf31ce416cd36
| 63
|
py
|
Python
|
envy/lib/setup_step/__init__.py
|
magmastonealex/fydp
|
fe3df058c3a7036e7e87ce6e7837b598007d7740
|
[
"MIT"
] | 6
|
2019-06-26T02:32:12.000Z
|
2020-03-01T23:08:37.000Z
|
envy/lib/setup_step/__init__.py
|
magmastonealex/fydp
|
fe3df058c3a7036e7e87ce6e7837b598007d7740
|
[
"MIT"
] | 18
|
2019-06-26T04:08:33.000Z
|
2021-06-01T23:53:08.000Z
|
envy/lib/setup_step/__init__.py
|
envy-project/envy
|
fe3df058c3a7036e7e87ce6e7837b598007d7740
|
[
"MIT"
] | null | null | null |
from .builder import Builder
from .setup_step import SetupStep
| 21
| 33
| 0.84127
| 9
| 63
| 5.777778
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 63
| 2
| 34
| 31.5
| 0.945455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
d98828794db7478bc49ea8dda03e0e402b4595d6
| 130
|
py
|
Python
|
slack-webhook-handler.py
|
jghoman/slack-webhook-handler
|
49ea6d32a7fd03078bd44d847a352b4351735c0d
|
[
"Apache-2.0"
] | null | null | null |
slack-webhook-handler.py
|
jghoman/slack-webhook-handler
|
49ea6d32a7fd03078bd44d847a352b4351735c0d
|
[
"Apache-2.0"
] | null | null | null |
slack-webhook-handler.py
|
jghoman/slack-webhook-handler
|
49ea6d32a7fd03078bd44d847a352b4351735c0d
|
[
"Apache-2.0"
] | null | null | null |
import logging
class SlackWebhookHandler(logging.Handler):
def __init__(self, room, url, emoticon=":truck:"):
pass
| 16.25
| 54
| 0.692308
| 14
| 130
| 6.142857
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192308
| 130
| 7
| 55
| 18.571429
| 0.819048
| 0
| 0
| 0
| 0
| 0
| 0.054264
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0.25
| 0.25
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 5
|
794c6bf1ba095b131bfb815c8e7257672f9e3643
| 49
|
py
|
Python
|
tests/components/lyric/__init__.py
|
tbarbette/core
|
8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
tests/components/lyric/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 31,101
|
2020-03-02T13:00:16.000Z
|
2022-03-31T23:57:36.000Z
|
tests/components/lyric/__init__.py
|
jagadeeshvenkatesh/core
|
1bd982668449815fee2105478569f8e4b5670add
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""Tests for the Honeywell Lyric integration."""
| 24.5
| 48
| 0.734694
| 6
| 49
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 49
| 1
| 49
| 49
| 0.837209
| 0.857143
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
798cf210ba9a30098801b4860ab0210c53bebbdc
| 194
|
py
|
Python
|
zsxq/webdriver/support.py
|
xdfbb/zsxqbackup
|
8e38cbf497dea41bb45c303271f45fa01c2aefd8
|
[
"MIT"
] | 45
|
2021-04-18T10:47:17.000Z
|
2022-03-06T02:07:38.000Z
|
zsxq/webdriver/support.py
|
xdfbb/zsxqbackup
|
8e38cbf497dea41bb45c303271f45fa01c2aefd8
|
[
"MIT"
] | 4
|
2021-04-02T11:08:52.000Z
|
2021-09-08T08:35:58.000Z
|
zsxq/webdriver/support.py
|
xdfbb/zsxqbackup
|
8e38cbf497dea41bb45c303271f45fa01c2aefd8
|
[
"MIT"
] | 15
|
2021-04-09T07:16:58.000Z
|
2022-03-22T02:08:17.000Z
|
from selenium import webdriver
class AutoClosableChrome(webdriver.Chrome):
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.quit()
| 19.4
| 50
| 0.695876
| 24
| 194
| 5.166667
| 0.708333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.221649
| 194
| 9
| 51
| 21.555556
| 0.821192
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.166667
| 0.833333
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
79b216b3f244f1ee7b69bcea1e0b516430976400
| 29
|
py
|
Python
|
hashbrown/__init__.py
|
potatolondon/django-hashbrown
|
bb78243b649ddc7a8acb66bbbd5c2643ba7bfca0
|
[
"BSD-2-Clause"
] | 13
|
2015-02-06T12:07:23.000Z
|
2022-03-18T23:20:22.000Z
|
hashbrown/__init__.py
|
potatolondon/django-hashbrown
|
bb78243b649ddc7a8acb66bbbd5c2643ba7bfca0
|
[
"BSD-2-Clause"
] | 3
|
2015-03-09T10:23:55.000Z
|
2018-08-29T09:42:32.000Z
|
hashbrown/__init__.py
|
potatolondon/django-hashbrown
|
bb78243b649ddc7a8acb66bbbd5c2643ba7bfca0
|
[
"BSD-2-Clause"
] | 4
|
2016-07-20T14:08:06.000Z
|
2019-07-18T09:30:07.000Z
|
from .utils import is_active
| 14.5
| 28
| 0.827586
| 5
| 29
| 4.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 29
| 1
| 29
| 29
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8dc2c8902b4d74e09e76581ee80d01d4d43cbb79
| 322
|
py
|
Python
|
ccapi/interfaces/__init__.py
|
paragonjh/ccapis
|
b6c70d31853e7c6c7dc128a8ca6f6890f10b5dd4
|
[
"MIT"
] | 1
|
2019-05-20T18:39:20.000Z
|
2019-05-20T18:39:20.000Z
|
ccapi/interfaces/__init__.py
|
paragonjh/ccapis
|
b6c70d31853e7c6c7dc128a8ca6f6890f10b5dd4
|
[
"MIT"
] | null | null | null |
ccapi/interfaces/__init__.py
|
paragonjh/ccapis
|
b6c70d31853e7c6c7dc128a8ca6f6890f10b5dd4
|
[
"MIT"
] | null | null | null |
from ccapi.interfaces.bitfinex import Bitfinex
from ccapi.interfaces.bittrex import Bittrex
from ccapi.interfaces.poloniex import Poloniex
from ccapi.interfaces.bithumb import Bithumb
from ccapi.interfaces.coinone import Coinone
from ccapi.interfaces.korbit import Korbit
# from ccapis.interfaces.coinbase import Coinbase
| 40.25
| 49
| 0.863354
| 42
| 322
| 6.619048
| 0.285714
| 0.194245
| 0.410072
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090062
| 322
| 7
| 50
| 46
| 0.948805
| 0.145963
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
8dcf74423733d6e0aea590ccb55a955db57a786c
| 235
|
py
|
Python
|
tf_cylindrical/__init__.py
|
lightwell64646/cylindricalsfmlearner
|
2291d30eaa8318961e4e9eeed616e2aa9b22583c
|
[
"MIT"
] | null | null | null |
tf_cylindrical/__init__.py
|
lightwell64646/cylindricalsfmlearner
|
2291d30eaa8318961e4e9eeed616e2aa9b22583c
|
[
"MIT"
] | null | null | null |
tf_cylindrical/__init__.py
|
lightwell64646/cylindricalsfmlearner
|
2291d30eaa8318961e4e9eeed616e2aa9b22583c
|
[
"MIT"
] | null | null | null |
"""High-level operations for cylindrically-shaped data"""
from tf_cylindrical.layers import conv2d, convolution2d, linear, resnet, flatten, conv2dTranspose, attention, reverseAttention
from tf_cylindrical.pad import wrap, unwrap
| 47
| 128
| 0.808511
| 27
| 235
| 6.962963
| 0.851852
| 0.06383
| 0.180851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014423
| 0.114894
| 235
| 4
| 129
| 58.75
| 0.889423
| 0.217021
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
8dda5d322852564ab0798a26f5ac8d346af0e764
| 261
|
py
|
Python
|
generated-libraries/python/netapp/kerberos/kdc_vendor.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | 2
|
2017-03-28T15:31:26.000Z
|
2018-08-16T22:15:18.000Z
|
generated-libraries/python/netapp/kerberos/kdc_vendor.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
generated-libraries/python/netapp/kerberos/kdc_vendor.py
|
radekg/netapp-ontap-lib-get
|
6445ebb071ec147ea82a486fbe9f094c56c5c40d
|
[
"MIT"
] | null | null | null |
class KdcVendor(basestring):
"""
Kerberos Key Distribution Center (KDC) Vendor
Possible values:
<ul>
<li> "microsoft" ,
<li> "other"
</ul>
"""
@staticmethod
def get_api_name():
return "kdc-vendor"
| 17.4
| 49
| 0.536398
| 25
| 261
| 5.52
| 0.84
| 0.130435
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.337165
| 261
| 14
| 50
| 18.642857
| 0.797688
| 0.417625
| 0
| 0
| 0
| 0
| 0.086207
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
8de52167251c58aedd39e9f8ea8f849814bb4eaf
| 159
|
py
|
Python
|
codeinthedark/admin.py
|
SetaHoefnagel/CodeInTheDark
|
53320e25cf135621bbc143c2f0ac29de46790871
|
[
"MIT"
] | null | null | null |
codeinthedark/admin.py
|
SetaHoefnagel/CodeInTheDark
|
53320e25cf135621bbc143c2f0ac29de46790871
|
[
"MIT"
] | 3
|
2021-04-08T21:08:40.000Z
|
2021-06-10T19:42:32.000Z
|
codeinthedark/admin.py
|
SetaHoefnagel/CodeInTheDark
|
53320e25cf135621bbc143c2f0ac29de46790871
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Room, Contestant
# Register your models here.
admin.site.register(Room)
admin.site.register(Contestant)
| 19.875
| 36
| 0.805031
| 22
| 159
| 5.818182
| 0.545455
| 0.140625
| 0.265625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.113208
| 159
| 7
| 37
| 22.714286
| 0.907801
| 0.163522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
30cd1489da3850e232f25760cf713e4a9b24fc34
| 121
|
py
|
Python
|
python_multiprocess_crawler/__init__.py
|
Kvintus/python_multiprocess_crawler
|
7bb86c1261d7293552c6fc01d81a7e5968a02c09
|
[
"Apache-2.0"
] | null | null | null |
python_multiprocess_crawler/__init__.py
|
Kvintus/python_multiprocess_crawler
|
7bb86c1261d7293552c6fc01d81a7e5968a02c09
|
[
"Apache-2.0"
] | null | null | null |
python_multiprocess_crawler/__init__.py
|
Kvintus/python_multiprocess_crawler
|
7bb86c1261d7293552c6fc01d81a7e5968a02c09
|
[
"Apache-2.0"
] | null | null | null |
# VScode intelisense wouldn't work without this file
from .main import CrawlerBase
# name = "python_multiprocess_crawler"
| 40.333333
| 52
| 0.818182
| 16
| 121
| 6.0625
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123967
| 121
| 3
| 53
| 40.333333
| 0.915094
| 0.719008
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
30e7c6eafee819794db27fd35f926ae09acfd29b
| 20
|
py
|
Python
|
checkov/version.py
|
new23d/checkov
|
a5e77979d082b72660924fee73f0bbd0611bf5d1
|
[
"Apache-2.0"
] | null | null | null |
checkov/version.py
|
new23d/checkov
|
a5e77979d082b72660924fee73f0bbd0611bf5d1
|
[
"Apache-2.0"
] | null | null | null |
checkov/version.py
|
new23d/checkov
|
a5e77979d082b72660924fee73f0bbd0611bf5d1
|
[
"Apache-2.0"
] | null | null | null |
version = '2.0.138'
| 10
| 19
| 0.6
| 4
| 20
| 3
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294118
| 0.15
| 20
| 1
| 20
| 20
| 0.411765
| 0
| 0
| 0
| 0
| 0
| 0.35
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
a51a9642113743fe42df4edf603ea9637e1bf8a5
| 2,785
|
py
|
Python
|
quiz/migrations/0001_initial.py
|
Den4200/fbla-quiz
|
9f4d34dbaad5b7d03f3784ed19402117ec750641
|
[
"MIT"
] | null | null | null |
quiz/migrations/0001_initial.py
|
Den4200/fbla-quiz
|
9f4d34dbaad5b7d03f3784ed19402117ec750641
|
[
"MIT"
] | 4
|
2021-06-05T00:05:28.000Z
|
2021-09-22T19:43:38.000Z
|
quiz/migrations/0001_initial.py
|
Den4200/fbla-quiz
|
9f4d34dbaad5b7d03f3784ed19402117ec750641
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.3 on 2020-12-01 03:04
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='ChoiceQuizEntry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.CharField(help_text='The question of the entry.', max_length=4096)),
('answer', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=256), help_text='The answers to the question.', size=None)),
('answer_choices', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=256), help_text='All possible choices for a question, including the answers themselves.', size=None)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='MultipleChoiceQuizEntry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.CharField(help_text='The question of the entry.', max_length=4096)),
('answer', models.CharField(help_text='The answer to the question.', max_length=256)),
('answer_choices', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=256), help_text='All possible choices for a question, including the answer itself.', size=None)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='ShortAnswerQuizEntry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.CharField(help_text='The question of the entry.', max_length=4096)),
('answer', models.CharField(help_text='The answer to the question.', max_length=256)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='TrueFalseQuizEntry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.CharField(help_text='The question of the entry.', max_length=4096)),
('answer', models.BooleanField(help_text='The answer to the question.')),
],
options={
'abstract': False,
},
),
]
| 44.919355
| 218
| 0.59246
| 285
| 2,785
| 5.663158
| 0.245614
| 0.049566
| 0.054523
| 0.085502
| 0.776952
| 0.776952
| 0.749071
| 0.730483
| 0.664808
| 0.664808
| 0
| 0.022989
| 0.281508
| 2,785
| 61
| 219
| 45.655738
| 0.783608
| 0.016158
| 0
| 0.62963
| 1
| 0
| 0.203068
| 0.0084
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.037037
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
eb4bf2f5325a4cf12666c425b0d4a828845bf233
| 229
|
py
|
Python
|
reinforcement/agents/__init__.py
|
hoxmark/Deep_reinforcement_active_learning
|
7458916d6f75c7fbfcfd4bc81763ab5ba16208ad
|
[
"MIT"
] | 19
|
2018-03-19T12:08:18.000Z
|
2021-08-14T09:12:33.000Z
|
reinforcement/agents/__init__.py
|
hoxmark/TDT4501-Specialization-Project
|
7458916d6f75c7fbfcfd4bc81763ab5ba16208ad
|
[
"MIT"
] | 20
|
2020-01-28T22:14:40.000Z
|
2022-03-11T23:17:48.000Z
|
reinforcement/agents/__init__.py
|
hoxmark/Deep_reinforcement_active_learning
|
7458916d6f75c7fbfcfd4bc81763ab5ba16208ad
|
[
"MIT"
] | 6
|
2018-07-25T08:07:45.000Z
|
2021-08-14T09:12:34.000Z
|
from agents.dqn_agent import DQNAgent
from agents.dqn_target_agent import DQNTargetAgent
from agents.reinforce_agent import PolicyAgent
from agents.actor_critic import ActorCriticAgent
from agents.random_agent import RandomAgent
| 38.166667
| 50
| 0.89083
| 31
| 229
| 6.387097
| 0.483871
| 0.252525
| 0.131313
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.087336
| 229
| 5
| 51
| 45.8
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
eb5a509a5d4693ea9e457a110b214db37ce0e430
| 123
|
py
|
Python
|
Jose Portilla/Python Bootcamp Go from Zero to Hero in Python 3/Python Errors and Exception Handling/cap.py
|
Anim-101/CourseHub
|
570ddc2bca794c14921991d24fdf1b4a7d0beb68
|
[
"MIT"
] | 3
|
2019-11-01T17:07:13.000Z
|
2020-04-01T10:27:05.000Z
|
Jose Portilla/Python Bootcamp Go from Zero to Hero in Python 3/Python Errors and Exception Handling/cap.py
|
Anim-101/CourseHub
|
570ddc2bca794c14921991d24fdf1b4a7d0beb68
|
[
"MIT"
] | 18
|
2020-08-10T05:11:24.000Z
|
2021-12-03T15:13:40.000Z
|
Jose Portilla/Python Bootcamp Go from Zero to Hero in Python 3/Python Errors and Exception Handling/cap.py
|
Anim-101/CourseHub
|
570ddc2bca794c14921991d24fdf1b4a7d0beb68
|
[
"MIT"
] | null | null | null |
def cap_text(text):
'''
Input a String
Output a Capitalized String
'''
# return text.capitalize()
return text.title()
| 17.571429
| 28
| 0.699187
| 17
| 123
| 5
| 0.647059
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170732
| 123
| 7
| 29
| 17.571429
| 0.833333
| 0.552846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
eb5f48398c8097642f497e51bc4d63f81d0d11ac
| 189
|
py
|
Python
|
phasepapy/phasepicker/__init__.py
|
GeoscienceAustralia/PhasePApy
|
112e712015ac54a32aaa399fa13a2db8cd8f763b
|
[
"CC0-1.0"
] | 2
|
2017-11-10T01:55:52.000Z
|
2021-11-13T04:32:00.000Z
|
phasepapy/phasepicker/__init__.py
|
GeoscienceAustralia/PhasePApy
|
112e712015ac54a32aaa399fa13a2db8cd8f763b
|
[
"CC0-1.0"
] | 11
|
2017-09-12T05:40:13.000Z
|
2017-10-26T03:27:59.000Z
|
phasepapy/phasepicker/__init__.py
|
GeoscienceAustralia/PhasePApy
|
112e712015ac54a32aaa399fa13a2db8cd8f763b
|
[
"CC0-1.0"
] | 3
|
2019-09-02T01:52:47.000Z
|
2020-07-05T15:36:08.000Z
|
""" phasepapy.phasepicker
This package contains modules to make earthquake phase picks.
"""
from .aicdpicker import AICDPicker
from .fbpicker import FBPicker
from .ktpicker import KTPicker
| 27
| 61
| 0.814815
| 23
| 189
| 6.695652
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126984
| 189
| 7
| 62
| 27
| 0.933333
| 0.439153
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
eb685d82b9f90b98bacc82400b77055a59dd77ef
| 32
|
py
|
Python
|
simulation/main.py
|
Comp01-sudo/OS
|
d7478fc45c844820e089690ccca9772ee6539f3c
|
[
"MIT"
] | null | null | null |
simulation/main.py
|
Comp01-sudo/OS
|
d7478fc45c844820e089690ccca9772ee6539f3c
|
[
"MIT"
] | null | null | null |
simulation/main.py
|
Comp01-sudo/OS
|
d7478fc45c844820e089690ccca9772ee6539f3c
|
[
"MIT"
] | null | null | null |
import numpy
def main():
| 5.333333
| 12
| 0.5625
| 4
| 32
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.34375
| 32
| 5
| 13
| 6.4
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.5
| null | null | 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
eb9fc2d7d5a3ca6a33f176d6b223d5393799a3c2
| 131
|
py
|
Python
|
econometrics-with-python/_build/jupyter_execute/ch03/ch03_1.py
|
harrywang/econometrics-with-python
|
438995d95efd9a292bb141493b14a183232b79bb
|
[
"CC0-1.0"
] | null | null | null |
econometrics-with-python/_build/jupyter_execute/ch03/ch03_1.py
|
harrywang/econometrics-with-python
|
438995d95efd9a292bb141493b14a183232b79bb
|
[
"CC0-1.0"
] | null | null | null |
econometrics-with-python/_build/jupyter_execute/ch03/ch03_1.py
|
harrywang/econometrics-with-python
|
438995d95efd9a292bb141493b14a183232b79bb
|
[
"CC0-1.0"
] | null | null | null |
# 3.1 Estimation of the Population Mean
[Book Link](https://www.econometrics-with-r.org/3-1-estimation-of-the-population-mean.html)
| 65.5
| 91
| 0.778626
| 23
| 131
| 4.434783
| 0.695652
| 0.039216
| 0.235294
| 0.27451
| 0.607843
| 0.607843
| 0.607843
| 0
| 0
| 0
| 0
| 0.03252
| 0.061069
| 131
| 2
| 91
| 65.5
| 0.796748
| 0.282443
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ebcaa799da69066638419cdc49599b31132de2f5
| 39
|
py
|
Python
|
src/lib/FunctionObject.py
|
shayansm2/metaalgolib
|
354e0d215d61b893b7069f5379222cb2df7561fe
|
[
"MIT"
] | null | null | null |
src/lib/FunctionObject.py
|
shayansm2/metaalgolib
|
354e0d215d61b893b7069f5379222cb2df7561fe
|
[
"MIT"
] | null | null | null |
src/lib/FunctionObject.py
|
shayansm2/metaalgolib
|
354e0d215d61b893b7069f5379222cb2df7561fe
|
[
"MIT"
] | null | null | null |
class FunctionObject(object):
pass
| 13
| 29
| 0.74359
| 4
| 39
| 7.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179487
| 39
| 2
| 30
| 19.5
| 0.90625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
ebdd9e1b7026865604257e2b66b79614c41d031c
| 97,144
|
py
|
Python
|
solentware_base/core/tests/test__nosql.py
|
RogerMarsh/solentware-base
|
bc9a113c2c597e9f23e36e5cc6ba74eed41612d8
|
[
"BSD-3-Clause"
] | null | null | null |
solentware_base/core/tests/test__nosql.py
|
RogerMarsh/solentware-base
|
bc9a113c2c597e9f23e36e5cc6ba74eed41612d8
|
[
"BSD-3-Clause"
] | null | null | null |
solentware_base/core/tests/test__nosql.py
|
RogerMarsh/solentware-base
|
bc9a113c2c597e9f23e36e5cc6ba74eed41612d8
|
[
"BSD-3-Clause"
] | null | null | null |
# test__nosql.py
# Copyright 2019 Roger Marsh
# Licence: See LICENCE (BSD licence)
"""_nosql _database tests
The rest of this docstring probably belongs a lot higher up the package tree.
Originally unit tests were fitted to the packages long after initial write,
because proof of testing is a good thing.
However I have ended up using them in shortish 'code-test' cycles to check the
method just newly written or amended actually succceeds in running, once the
module has a fairly stable structure and enough of it exists to run. Largely
as a consequence of going for relative imports within a package whereever
possible: then python -m <test> is a convenient universal way of seeing if it
runs. I avoided relative imports for a long time because they do not fit well
with idle.
Sometimes a unit test will have an attempt at exhaustive testing too.
"""
# The _nosql and test__nosql modules are written by copying _sqlite and
# test__sqlite, then change test__nosql to do unqlite or vedis things one test
# at a time and replace the SQLite things in _nosql as they get hit.
import unittest
import os
from ast import literal_eval
try:
import unqlite
except ImportError: # Not ModuleNotFoundError for Pythons earlier than 3.6
unqlite = None
try:
import vedis
except ImportError: # Not ModuleNotFoundError for Pythons earlier than 3.6
vedis = None
try:
from ... import ndbm_module
except ImportError: # Not ModuleNotFoundError for Pythons earlier than 3.6
ndbm_module = None
try:
from ... import gnu_module
except ImportError: # Not ModuleNotFoundError for Pythons earlier than 3.6
gnu_module = None
from .. import _nosql
from .. import tree
from .. import filespec
from .. import recordset
from ..segmentsize import SegmentSize
from ..wherevalues import ValuesClause
_NDBM_TEST_ROOT = "___ndbm_test_nosql"
_GNU_TEST_ROOT = "___gnu_test_nosql"
if ndbm_module:
class Ndbm(ndbm_module.Ndbm):
# test__nosql assumes database modules support memory-only databases,
# but ndbm does not support them.
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), _NDBM_TEST_ROOT)
super().__init__(path=path)
if gnu_module:
class Gnu(gnu_module.Gnu):
# test__nosql assumes database modules support memory-only databases,
# but gnu does not support them.
def __init__(self, path=None):
if path is None:
path = os.path.join(os.path.dirname(__file__), _GNU_TEST_ROOT)
super().__init__(path=path)
class _NoSQL(unittest.TestCase):
# The sets of tests are run inside a loop for unqlite and vedis, and some
# tests change SegmentSize.db_segment_size_bytes, so reset it to the
# initial value in tearDown().
def setUp(self):
# UnQLite and Vedis are sufficiently different that the open_database()
# call arguments have to be set differently for these engines.
if dbe_module is unqlite:
self._oda = dbe_module, dbe_module.UnQLite, dbe_module.UnQLiteError
elif dbe_module is vedis:
self._oda = dbe_module, dbe_module.Vedis, None
elif dbe_module is ndbm_module:
self._oda = dbe_module, Ndbm, None
elif dbe_module is gnu_module:
self._oda = dbe_module, Gnu, None
self.__ssb = SegmentSize.db_segment_size_bytes
class _D(_nosql.Database):
pass
self._D = _D
def tearDown(self):
self.database = None
self._D = None
SegmentSize.db_segment_size_bytes = self.__ssb
# I have no idea why the database teardown for ndbm has to be like so:
if dbe_module is ndbm_module:
path = os.path.join(
os.path.dirname(__file__), ".".join((_NDBM_TEST_ROOT, "db"))
)
if os.path.isdir(path):
for f in os.listdir(path):
os.remove(os.path.join(path, f))
os.rmdir(path)
elif os.path.isfile(
path
): # Most tests, other two each have a few.
os.remove(path)
path = os.path.join(os.path.dirname(__file__), _NDBM_TEST_ROOT)
if os.path.isdir(path):
for f in os.listdir(path):
os.remove(os.path.join(path, f))
os.rmdir(path)
# I have no idea why the database teardown for gnu has to be like so:
if dbe_module is gnu_module:
path = os.path.join(os.path.dirname(__file__), _GNU_TEST_ROOT)
if os.path.isfile(path):
os.remove(path)
if os.path.isdir(path):
for f in os.listdir(path):
os.remove(os.path.join(path, f))
os.rmdir(path)
class Database___init__(_NoSQL):
def test_01(self):
self.assertRaisesRegex(
TypeError,
"".join(
(
"__init__\(\) takes from 2 to 5 positional arguments ",
"but 6 were given",
)
),
self._D,
*(None, None, None, None, None),
)
def test_02(self):
# Matches 'type object' before Python 3.9 but class name otherwise.
t = r"(?:type object|solentware_base\.core\.filespec\.FileSpec\(\))"
self.assertRaisesRegex(
TypeError,
"".join(
(
t,
" argument after \*\* must be a mapping, ",
"not NoneType",
)
),
self._D,
*(None,),
)
self.assertIsInstance(self._D({}), self._D)
self.assertIsInstance(self._D(filespec.FileSpec()), self._D)
def test_03(self):
self.assertRaisesRegex(
_nosql.DatabaseError,
"".join(("Database folder name {} is not valid",)),
self._D,
*({},),
**dict(folder={}),
)
def test_04(self):
database = self._D({}, folder="a")
self.assertEqual(
sorted(database.__dict__.keys()),
[
"_initial_segment_size_bytes",
"_real_segment_size_bytes",
"_use_specification_items",
"database_file",
"dbenv",
"ebm_control",
"ebm_segment_count",
"home_directory",
"segment_records",
"segment_size_bytes",
"segment_table",
"specification",
"table",
"table_data",
"trees",
],
)
self.assertIsInstance(database, self._D)
self.assertEqual(os.path.basename(database.home_directory), "a")
self.assertEqual(os.path.basename(database.database_file), "a")
self.assertEqual(
os.path.basename(os.path.dirname(database.database_file)), "a"
)
self.assertEqual(database.specification, {})
self.assertEqual(database.segment_size_bytes, 4000)
self.assertEqual(database.dbenv, None)
self.assertEqual(database.table, {})
self.assertEqual(database.segment_table, {})
self.assertEqual(database.segment_records, {})
self.assertEqual(database.ebm_control, {})
self.assertEqual(database.ebm_segment_count, {})
self.assertEqual(database.trees, {})
self.assertEqual(database._real_segment_size_bytes, False)
self.assertEqual(database._initial_segment_size_bytes, 4000)
self.assertEqual(SegmentSize.db_segment_size_bytes, 4096)
database.set_segment_size()
self.assertEqual(SegmentSize.db_segment_size_bytes, 4000)
def test_05(self):
database = self._D({})
self.assertEqual(database.home_directory, None)
self.assertEqual(database.database_file, None)
# This combination of folder and segment_size_bytes arguments is used for
# unittests, except for one to see a non-memory database with a realistic
# segment size.
def test_06(self):
database = self._D({}, segment_size_bytes=None)
self.assertEqual(database.segment_size_bytes, None)
database.set_segment_size()
self.assertEqual(SegmentSize.db_segment_size_bytes, 16)
# Transaction methods do not raise exceptions if called when no database open
# but do nothing.
class Database_transaction_methods(_NoSQL):
def setUp(self):
super().setUp()
self.database = self._D({})
def test_01_start_transaction(self):
self.assertEqual(self.database.dbenv, None)
self.database.start_transaction()
def test_02_backout(self):
self.assertEqual(self.database.dbenv, None)
self.database.backout()
def test_03_commit(self):
self.assertEqual(self.database.dbenv, None)
self.database.commit()
def test_04(self):
self.assertRaisesRegex(
TypeError,
"".join(
(
"start_transaction\(\) takes 1 positional argument ",
"but 2 were given",
)
),
self.database.start_transaction,
*(None,),
)
self.assertRaisesRegex(
TypeError,
"".join(
(
"backout\(\) takes 1 positional argument ",
"but 2 were given",
)
),
self.database.backout,
*(None,),
)
self.assertRaisesRegex(
TypeError,
"".join(
(
"commit\(\) takes 1 positional argument ",
"but 2 were given",
)
),
self.database.commit,
*(None,),
)
# Methods which do not require database to be open.
class DatabaseInstance(_NoSQL):
def setUp(self):
super().setUp()
self.database = self._D({})
def test_01_validate_segment_size_bytes(self):
self.assertRaisesRegex(
TypeError,
"".join(
(
"_validate_segment_size_bytes\(\) missing 1 required ",
"positional argument: 'segment_size_bytes'",
)
),
self.database._validate_segment_size_bytes,
)
self.assertRaisesRegex(
_nosql.DatabaseError,
"".join(("Database segment size must be an int",)),
self.database._validate_segment_size_bytes,
*("a",),
)
self.assertRaisesRegex(
_nosql.DatabaseError,
"".join(("Database segment size must be more than 0",)),
self.database._validate_segment_size_bytes,
*(0,),
)
self.assertEqual(
self.database._validate_segment_size_bytes(None), None
)
self.assertEqual(self.database._validate_segment_size_bytes(1), None)
def test_02_encode_record_number(self):
self.assertRaisesRegex(
TypeError,
"".join(
(
"encode_record_number\(\) missing 1 required ",
"positional argument: 'key'",
)
),
self.database.encode_record_number,
)
self.assertEqual(self.database.encode_record_number(1), "1")
def test_03_decode_record_number(self):
self.assertRaisesRegex(
TypeError,
"".join(
(
"decode_record_number\(\) missing 1 required ",
"positional argument: 'skey'",
)
),
self.database.decode_record_number,
)
self.assertEqual(self.database.decode_record_number("1"), 1)
def test_04_encode_record_selector(self):
self.assertRaisesRegex(
TypeError,
"".join(
(
"encode_record_selector\(\) missing 1 required ",
"positional argument: 'key'",
)
),
self.database.encode_record_selector,
)
self.assertEqual(self.database.encode_record_selector("a"), "a")
def test_05_make_recordset(self):
self.assertRaisesRegex(
TypeError,
"".join(
(
"recordlist_nil\(\) takes from 2 to 3 positional arguments ",
"but 4 were given",
)
),
self.database.recordlist_nil,
*(None, None, None),
)
self.assertIsInstance(
self.database.recordlist_nil("a"), recordset.RecordList
)
# Memory databases are used for these tests.
class Database_open_database(_NoSQL):
    """Verify open_database() and the close methods on memory databases."""

    def test_01(self):
        self.database = self._D({})
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    # Raw strings: '\(' is an invalid escape sequence in a
                    # plain string literal (SyntaxWarning in Python 3.12).
                    r"open_database\(\) takes from 4 to 5 positional arguments ",
                    "but 6 were given",
                )
            ),
            self.database.open_database,
            *(None, None, None, None, None),
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"close_database\(\) takes 1 positional argument ",
                    "but 2 were given",
                )
            ),
            self.database.close_database,
            *(None,),
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"close_database_contexts\(\) takes from 1 to 2 positional ",
                    "arguments but 3 were given",
                )
            ),
            self.database.close_database_contexts,
            *(None, None),
        )

    def test_02(self):
        # Default segment size is used when segment_size_bytes not given.
        self.database = self._D({})
        self.database.open_database(*self._oda)
        self.assertEqual(SegmentSize.db_segment_size_bytes, 4000)
        self.assertEqual(self.database.home_directory, None)
        self.assertEqual(self.database.database_file, None)
        self.assertIsInstance(self.database.dbenv, self._oda[1])

    def test_03(self):
        # segment_size_bytes=None selects the small 'testing only' size.
        self.database = self._D({}, segment_size_bytes=None)
        self.database.open_database(*self._oda)
        self.assertEqual(SegmentSize.db_segment_size_bytes, 16)
        self.assertEqual(self.database.home_directory, None)
        self.assertEqual(self.database.database_file, None)
        self.assertIsInstance(self.database.dbenv, self._oda[1])

    def test_04_close_database(self):
        # close_database() is idempotent: a second call is harmless.
        self.database = self._D({}, segment_size_bytes=None)
        self.database.open_database(*self._oda)
        self.database.close_database()
        self.assertEqual(self.database.dbenv, None)
        self.database.close_database()
        self.assertEqual(self.database.dbenv, None)

    def test_05_close_database_contexts(self):
        # close_database_contexts() is idempotent too.
        self.database = self._D({}, segment_size_bytes=None)
        self.database.open_database(*self._oda)
        self.database.close_database_contexts()
        self.assertEqual(self.database.dbenv, None)
        self.database.close_database_contexts()
        self.assertEqual(self.database.dbenv, None)

    def test_06(self):
        # A plain dict specification is accepted.
        self.database = self._D({"file1": {"field1"}})
        self.database.open_database(*self._oda)
        self.check_specification()

    def test_07(self):
        # A FileSpec specification gives the same result as a plain dict.
        self.database = self._D(filespec.FileSpec(**{"file1": {"field1"}}))
        self.database.open_database(*self._oda)
        self.check_specification()

    def test_08(self):
        # The files argument restricts which files are opened.
        self.database = self._D(
            filespec.FileSpec(**{"file1": {"field1"}, "file2": {"field2"}})
        )
        self.database.open_database(*self._oda, files={"file1"})
        self.check_specification()

    def test_09(self):
        self.database = self._D(
            filespec.FileSpec(
                **{"file1": {"field1"}, "file2": (), "file3": {"field2"}}
            )
        )
        # No tree for field2 in file3 (without a full FileSpec instance).
        self.database.specification["file3"]["fields"]["Field2"][
            "access_method"
        ] = "hash"
        self.database.open_database(*self._oda)
        self.assertEqual(
            self.database.table,
            {
                "file1": ["1"],
                "___control": "0",
                "file1_field1": ["1_1"],
                "file2": ["2"],
                "file3": ["3"],
                "file3_field2": ["3_1"],
            },
        )
        self.assertEqual(
            self.database.segment_table,
            {"file1_field1": "1_1_0", "file3_field2": "3_1_0"},
        )
        self.assertEqual(
            self.database.segment_records,
            {"file1_field1": "1_1_1", "file3_field2": "3_1_1"},
        )
        # Only the btree field gets a tree; the hash field does not.
        self.assertEqual(list(self.database.trees), ["file1_field1"])
        self.assertIsInstance(self.database.trees["file1_field1"], tree.Tree)
        self.assertEqual(self.database.ebm_control["file1"]._file, "1")
        self.assertEqual(
            self.database.ebm_control["file1"].ebm_table, "1_0__ebm"
        )
        self.assertEqual(self.database.ebm_control["file2"]._file, "2")
        self.assertEqual(
            self.database.ebm_control["file2"].ebm_table, "2_0__ebm"
        )
        self.assertEqual(self.database.ebm_segment_count, {})
        for v in self.database.ebm_control.values():
            self.assertIsInstance(v, _nosql.ExistenceBitmapControl)

    # Comment in _sqlite.py suggests this method is not needed.
    def test_12_is_database_file_active(self):
        self.database = self._D(
            filespec.FileSpec(**{"file1": {"field1"}, "file2": ()})
        )
        d = self.database
        self.assertEqual(d.is_database_file_active("file1"), False)
        d.open_database(*self._oda)
        self.assertEqual(d.is_database_file_active("file1"), True)

    def check_specification(self):
        """Assert table layout produced for the single file1/field1 spec."""
        self.assertEqual(
            self.database.table,
            {
                "file1": ["1"],
                "___control": "0",
                "file1_field1": ["1_1"],
            },
        )
        self.assertEqual(
            self.database.segment_table, {"file1_field1": "1_1_0"}
        )
        self.assertEqual(
            self.database.segment_records, {"file1_field1": "1_1_1"}
        )
        self.assertEqual(list(self.database.trees), ["file1_field1"])
        self.assertIsInstance(self.database.trees["file1_field1"], tree.Tree)
        self.assertEqual(self.database.ebm_control["file1"]._file, "1")
        self.assertEqual(
            self.database.ebm_control["file1"].ebm_table, "1_0__ebm"
        )
        self.assertEqual(self.database.ebm_segment_count, {})
        for v in self.database.ebm_control.values():
            self.assertIsInstance(v, _nosql.ExistenceBitmapControl)
# Memory databases are used for these tests.
# This one has to look like a real application (almost).
# Do not need to catch the self.__class__.SegmentSizeError exception in
# _ED.open_database() method.
class Database_do_database_task(unittest.TestCase):
    """Verify do_database_task() with and without an already open database."""

    # The sets of tests are run inside a loop for sqlite3 and apsw, and some
    # tests in this set change SegmentSize.db_segment_size_bytes, so reset it
    # to the initial value in tearDown().
    # _NoSQL does this, but Database_do_database_task is not based on it.
    def setUp(self):
        # UnQLite and Vedis are sufficiently different that the open_database()
        # call arguments have to be set differently for these engines.
        if dbe_module is unqlite:
            _oda = dbe_module, dbe_module.UnQLite, dbe_module.UnQLiteError
        elif dbe_module is vedis:
            _oda = dbe_module, dbe_module.Vedis, None
        elif dbe_module is ndbm_module:
            _oda = dbe_module, dbe_module.Ndbm, None
        elif dbe_module is gnu_module:
            _oda = dbe_module, dbe_module.Gnu, None
        self._ssb = SegmentSize.db_segment_size_bytes
        # _ED closes over _oda so open_database() needs no engine arguments;
        # _AD fixes the (empty) specification so only a folder is needed.
        class _ED(_nosql.Database):
            def open_database(self, **k):
                super().open_database(*_oda, **k)
        class _AD(_ED):
            def __init__(self, folder, **k):
                super().__init__({}, folder, **k)
        self._AD = _AD
    def tearDown(self):
        self.database = None
        self._AD = None
        SegmentSize.db_segment_size_bytes = self._ssb
        # I have no idea why the database teardown for gnu has to be like so:
        if dbe_module is gnu_module:
            path = os.path.join(os.path.dirname(__file__), _GNU_TEST_ROOT)
            if os.path.isfile(path):
                os.remove(path)
            if os.path.isdir(path):
                for f in os.listdir(path):
                    os.remove(os.path.join(path, f))
                os.rmdir(path)
    def test_01_do_database_task(self):
        # Run a no-op task against an explicitly opened database.
        def m(*a, **k):
            pass
        # ndbm and gnu need a real file system path; the others run in memory.
        if dbe_module in (ndbm_module,):
            path = os.path.join(os.path.dirname(__file__), _NDBM_TEST_ROOT)
        elif dbe_module in (gnu_module,):
            path = os.path.join(os.path.dirname(__file__), _GNU_TEST_ROOT)
        else:
            path = None
        self.database = self._AD(path)
        d = self.database
        d.open_database()
        self.assertEqual(d.do_database_task(m), None)
    def test_02_do_database_task(self):
        # Same task, but without calling open_database() first:
        # do_database_task() is expected to cope on its own.
        def m(*a, **k):
            pass
        if dbe_module in (ndbm_module,):
            path = os.path.join(os.path.dirname(__file__), _NDBM_TEST_ROOT)
        elif dbe_module in (gnu_module,):
            path = os.path.join(os.path.dirname(__file__), _GNU_TEST_ROOT)
        else:
            path = None
        self.database = self._AD(path)
        d = self.database
        self.assertEqual(d.do_database_task(m), None)
# Memory databases are used for these tests.
# Use the 'testing only' segment size for convenience of setup and eyeballing.
class _NoSQLOpen(_NoSQL):
    """Open a database with a btree 'field1' and a hash 'field2' index."""

    def setUp(self):
        super().setUp()
        spec = filespec.FileSpec(
            **{"file1": {"field1"}, "file2": {"field2"}}
        )
        self.database = self._D(spec, segment_size_bytes=None)
        field2 = self.database.specification["file2"]["fields"]["Field2"]
        field2["access_method"] = "hash"
        self.database.open_database(*self._oda)

    def tearDown(self):
        self.database.close_database()
        super().tearDown()
class DatabaseTransactions(_NoSQLOpen):
    """Transaction methods do nothing and return None for these engines."""

    def test_01(self):
        db = self.database
        db.start_transaction()
        self.assertIsNone(db.start_transaction())

    def test_02(self):
        db = self.database
        db.start_transaction()
        self.assertIsNone(db.backout())

    def test_03(self):
        db = self.database
        db.start_transaction()
        self.assertIsNone(db.commit())

    def test_04(self):
        # backout() outside a transaction is also a no-op.
        self.assertIsNone(self.database.backout())

    def test_05(self):
        # commit() outside a transaction is also a no-op.
        self.assertIsNone(self.database.commit())
class Database_put_replace_delete(_NoSQLOpen):
    """Verify the put(), replace(), and delete() record primitives."""

    # These tests are copied and modified from test__sqlite.
    # The tests on put assume a correct add_record_to_ebm method, and those on
    # delete assume a correct remove_record_from_ebm() method because the
    # bitmaps are used to identify the highest record number allocated.
    # UnQLite and Vedis do not have the notion of a record number like the
    # rowid in a SQLite3 table, or the key of a Recno database in Berkeley DB,
    # or the record number of a DPT file.
    def test_01(self):
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    # Raw strings: '\(' is an invalid escape sequence in a
                    # plain string literal (SyntaxWarning in Python 3.12).
                    r"put\(\) missing 3 required positional arguments: ",
                    "'file', 'key', and 'value'",
                )
            ),
            self.database.put,
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"replace\(\) missing 4 required positional arguments: ",
                    "'file', 'key', 'oldvalue', and 'newvalue'",
                )
            ),
            self.database.replace,
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"delete\(\) missing 3 required positional arguments: ",
                    "'file', 'key', and 'value'",
                )
            ),
            self.database.delete,
        )

    def test_02_put(self):
        # put() with key None allocates the next record number, starting at 0.
        recno = self.database.put("file1", None, "new value")
        self.assertEqual(recno, 0)

    def test_03_put(self):
        # put() with an explicit key returns None; the next put() with
        # key None allocates after the highest number in the bitmap.
        self.assertEqual("1__2" in self.database.dbenv, False)
        self.assertEqual(self.database.put("file1", 2, "new value"), None)
        self.database.add_record_to_ebm("file1", 2)
        self.assertEqual("1_0_2" in self.database.dbenv, True)
        recno = self.database.put("file1", None, "new value")
        self.assertEqual(recno, 3)

    def test_04_put(self):
        recno = self.database.put("file1", None, "new value")
        self.assertEqual(recno, 0)
        self.database.add_record_to_ebm("file1", 0)
        self.assertEqual(self.database.put("file1", 0, "renew value"), None)
        recno = self.database.put("file1", None, "other value")
        self.assertEqual(recno, 1)

    def test_05_replace(self):
        # replace() on a missing record does not create it.
        self.assertEqual("1_1" in self.database.dbenv, False)
        self.assertEqual(
            self.database.replace(
                "file1", 1, repr("old value"), repr("new value")
            ),
            None,
        )
        self.assertEqual("1_1" in self.database.dbenv, False)
        self.database.dbenv["1_1"] = repr(None)
        self.assertEqual("1_1" in self.database.dbenv, True)
        self.assertEqual(
            self.database.replace(
                "file1", 1, repr("old value"), repr("new value")
            ),
            None,
        )
        self.assertEqual("1_1" in self.database.dbenv, True)

    def test_06_replace(self):
        # replace() overwrites the stored value at the record key.
        self.database.dbenv["1_0_1"] = repr("old value")
        self.assertEqual(self.database.dbenv["1_0_1"], b"'old value'")
        self.assertEqual(
            self.database.replace(
                "file1", 1, repr("old value"), repr("new value")
            ),
            None,
        )
        self.assertEqual(self.database.dbenv["1_0_1"], b"'new value'")

    def test_07_replace(self):
        # '1_1' is not a record key for file1, so it is left untouched.
        self.database.dbenv["1_1"] = repr("old value")
        self.assertEqual(self.database.dbenv["1_1"], b"'old value'")
        self.assertEqual(
            self.database.replace(
                "file1", 1, repr("new value"), repr("same value")
            ),
            None,
        )
        self.assertEqual(self.database.dbenv["1_1"], b"'old value'")

    def test_08_delete(self):
        # delete() on a missing record is a no-op and returns None.
        self.assertEqual("1_1" in self.database.dbenv, False)
        self.assertEqual(
            self.database.delete("file1", 1, repr("new value")), None
        )
        self.assertEqual("1_1" in self.database.dbenv, False)
        self.database.dbenv["1_1"] = repr(None)
        self.assertEqual("1_1" in self.database.dbenv, True)
        self.assertEqual(
            self.database.delete("file1", 1, repr("new value")), None
        )
        self.assertEqual("1_1" in self.database.dbenv, True)

    def test_09_delete(self):
        # delete() removes an existing record key.
        self.database.dbenv["1_0_1"] = repr("new value")
        self.database.add_record_to_ebm("file1", 0)
        self.assertEqual("1_0_1" in self.database.dbenv, True)
        self.assertEqual(
            self.database.delete("file1", 1, repr("new value")), None
        )
        self.database.remove_record_from_ebm("file1", 0)
        self.assertEqual("1_0_1" in self.database.dbenv, False)

    def test_10_delete(self):
        # '1_1' is not a record key for file1, so it survives delete().
        self.database.dbenv["1_1"] = repr("new value")
        self.assertEqual("1_1" in self.database.dbenv, True)
        self.assertEqual(
            self.database.delete("file1", 1, repr("old value")), None
        )
        self.assertEqual("1_1" in self.database.dbenv, True)
# These tests need fully working put, replace, and delete methods.
class Database_methods(_NoSQLOpen):
    """Verify record, existence bitmap, and recordlist helper methods."""

    def test_01(self):
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    # Raw strings: '\(' is an invalid escape sequence in a
                    # plain string literal (SyntaxWarning in Python 3.12).
                    r"get_primary_record\(\) missing 2 required positional ",
                    "arguments: 'file' and 'key'",
                )
            ),
            self.database.get_primary_record,
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"remove_record_from_ebm\(\) missing 2 required ",
                    "positional arguments: 'file' and 'deletekey'",
                )
            ),
            self.database.remove_record_from_ebm,
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"add_record_to_ebm\(\) missing 2 required ",
                    "positional arguments: 'file' and 'putkey'",
                )
            ),
            self.database.add_record_to_ebm,
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"get_high_record\(\) missing 1 required ",
                    "positional argument: 'file'",
                )
            ),
            self.database.get_high_record,
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"recordlist_record_number\(\) takes from 2 to 4 ",
                    "positional arguments but 5 were given",
                )
            ),
            self.database.recordlist_record_number,
            *(None, None, None, None),
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"recordlist_record_number_range\(\) takes from 2 to 5 ",
                    "positional arguments but 6 were given",
                )
            ),
            self.database.recordlist_record_number_range,
            *(None, None, None, None, None),
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"recordlist_ebm\(\) takes from 2 to 3 ",
                    "positional arguments but 4 were given",
                )
            ),
            self.database.recordlist_ebm,
            *(None, None, None),
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"get_table_connection\(\) missing 1 required positional ",
                    "argument: 'file'",
                )
            ),
            self.database.get_table_connection,
        )

    def test_02_get_primary_record(self):
        self.assertEqual(self.database.get_primary_record("file1", None), None)

    def test_03_get_primary_record(self):
        # A missing record number gives None too.
        self.assertEqual(self.database.get_primary_record("file1", 1), None)

    def test_04_get_primary_record(self):
        self.database.put("file1", None, repr("new value"))
        self.assertEqual(
            self.database.get_primary_record("file1", 0), (0, "'new value'")
        )

    def test_05_remove_record_from_ebm(self):
        # Removing from a non-existent segment bitmap raises.
        self.assertRaisesRegex(
            _nosql.DatabaseError,
            "Existence bit map for segment does not exist",
            self.database.remove_record_from_ebm,
            *("file1", 2),
        )

    def test_06_remove_record_from_ebm(self):
        # Both methods return the (segment, record_number) pair.
        self.assertEqual(self.database.add_record_to_ebm("file1", 2), (0, 2))
        self.assertEqual(
            self.database.remove_record_from_ebm("file1", 2), (0, 2)
        )

    def test_07_add_record_to_ebm(self):
        self.assertEqual(self.database.add_record_to_ebm("file1", 2), (0, 2))
        self.assertEqual(self.database.add_record_to_ebm("file1", 4), (0, 4))

    def test_08_get_high_record(self):
        # No records yet, so there is no high record.
        self.assertEqual(self.database.get_high_record("file1"), None)

    def test_14_recordset_record_number(self):
        self.assertIsInstance(
            self.database.recordlist_record_number("file1"),
            recordset.RecordList,
        )

    def test_15_recordset_record_number(self):
        # A key beyond any existing record still gives an empty RecordList.
        self.assertIsInstance(
            self.database.recordlist_record_number("file1", key=500),
            recordset.RecordList,
        )

    def test_16_recordset_record_number(self):
        dbenv = self.database.dbenv
        self.assertEqual(dbenv.exists("1_0"), False)
        self.assertEqual(dbenv["1_0__ebm"], b"[]")
        self.assertEqual(dbenv.exists("1_0__ebm_0"), False)
        dbenv["1_0"] = repr("Some value")
        # Mark record 0 of segment 0 as existing.
        self.database.ebm_control["file1"].append_ebm_segment(
            b"\x80" + b"\x00" * (SegmentSize.db_segment_size_bytes - 1),
            self.database.dbenv,
        )
        rl = self.database.recordlist_record_number("file1", key=0)
        self.assertIsInstance(rl, recordset.RecordList)
        self.assertEqual(rl.count_records(), 1)

    def test_17_recordset_record_number_range(self):
        self.assertIsInstance(
            self.database.recordlist_record_number_range("file1"),
            recordset.RecordList,
        )

    def test_18_recordset_record_number_range(self):
        # Range covering the whole segment keeps every bit set.
        self.create_ebm()
        rs = self.database.recordlist_record_number_range(
            "file1", keystart=0, keyend=2000
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(
            rs[0].tobytes(),
            b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
        )

    def test_19_recordset_record_number_range(self):
        # keystart clears the bits below record 10.
        self.create_ebm()
        rs = self.database.recordlist_record_number_range("file1", keystart=10)
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(
            rs[0].tobytes(),
            b"\x00\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
        )

    def test_20_recordset_record_number_range(self):
        # keyend clears the bits above record 35.
        self.create_ebm()
        rs = self.database.recordlist_record_number_range("file1", keyend=35)
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(
            rs[0].tobytes(),
            b"\xff\xff\xff\xff\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )

    def test_21_recordset_record_number_range(self):
        # keystart and keyend combine to a window of set bits.
        self.create_ebm()
        rs = self.database.recordlist_record_number_range(
            "file1", keystart=10, keyend=35
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(
            rs[0].tobytes(),
            b"\x00\x3f\xff\xff\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )

    def test_22_recordset_record_number_range(self):
        # A range spanning several segments keeps only segments 1 and 2.
        self.create_ebm()
        self.create_ebm()
        self.create_ebm()
        self.create_ebm()
        rs = self.database.recordlist_record_number_range(
            "file1", keystart=170, keyend=350
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 2)
        self.assertEqual(
            rs[1].tobytes(),
            b"\x00\x00\x00\x00\x00\x3f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
        )
        self.assertEqual(
            rs[2].tobytes(),
            b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfc\x00\x00\x00\x00",
        )

    def test_23_recordset_record_number_range(self):
        # A reversed range (start > end) gives an empty result.
        self.create_ebm()
        self.create_ebm()
        self.create_ebm()
        self.create_ebm()
        rs = self.database.recordlist_record_number_range(
            "file1", keystart=350, keyend=170
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 0)

    def test_24_recordset_ebm(self):
        self.assertIsInstance(
            self.database.recordlist_ebm("file1"), recordset.RecordList
        )

    def test_25_recordset_ebm(self):
        self.create_ebm()
        rlebm = self.database.recordlist_ebm("file1")
        self.assertIsInstance(rlebm, recordset.RecordList)
        self.assertEqual(rlebm.sorted_segnums, [0])

    def test_26_get_table_connection(self):
        if dbe_module is unqlite:
            object_class = unqlite.UnQLite
        elif dbe_module is vedis:
            object_class = vedis.Vedis
        elif dbe_module is ndbm_module:
            object_class = ndbm_module.Ndbm
        elif dbe_module is gnu_module:
            object_class = gnu_module.Gnu
        self.assertIsInstance(
            self.database.get_table_connection("file1"), object_class
        )

    def create_ebm(self):
        """Append a fully-set existence bitmap segment for file1."""
        # b"\xff" * size is equivalent to the original
        # b"\xff" + b"\xff" * (size - 1) expression.
        self.database.ebm_control["file1"].append_ebm_segment(
            b"\xff" * SegmentSize.db_segment_size_bytes,
            self.database.dbenv,
        )
class Database_find_values__empty(_NoSQLOpen):
    """find_values() yields nothing when the index tree holds no values."""

    def setUp(self):
        super().setUp()
        self.valuespec = ValuesClause()
        self.valuespec.field = "field1"

    def test_01_find_values(self):
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    # Raw string: '\(' is an invalid escape sequence in a
                    # plain string literal (SyntaxWarning in Python 3.12).
                    r"find_values\(\) missing 2 required positional arguments: ",
                    "'valuespec' and 'file'",
                )
            ),
            self.database.find_values,
        )

    def test_02_find_values(self):
        self.valuespec.above_value = "b"
        self.valuespec.below_value = "d"
        self.assertEqual(
            list(self.database.find_values(self.valuespec, "file1")), []
        )

    def test_03_find_values(self):
        self.valuespec.above_value = "b"
        self.valuespec.to_value = "d"
        self.assertEqual(
            list(self.database.find_values(self.valuespec, "file1")), []
        )

    def test_04_find_values(self):
        self.valuespec.from_value = "b"
        self.valuespec.to_value = "d"
        self.assertEqual(
            list(self.database.find_values(self.valuespec, "file1")), []
        )

    def test_05_find_values(self):
        self.valuespec.from_value = "b"
        self.valuespec.below_value = "d"
        self.assertEqual(
            list(self.database.find_values(self.valuespec, "file1")), []
        )

    def test_06_find_values(self):
        self.valuespec.above_value = "b"
        self.assertEqual(
            list(self.database.find_values(self.valuespec, "file1")), []
        )

    def test_07_find_values(self):
        self.valuespec.from_value = "b"
        self.assertEqual(
            list(self.database.find_values(self.valuespec, "file1")), []
        )

    def test_08_find_values(self):
        self.valuespec.to_value = "d"
        self.assertEqual(
            list(self.database.find_values(self.valuespec, "file1")), []
        )

    def test_09_find_values(self):
        self.valuespec.below_value = "d"
        self.assertEqual(
            list(self.database.find_values(self.valuespec, "file1")), []
        )

    def test_10_find_values(self):
        # No bounds at all still finds nothing in an empty tree.
        self.assertEqual(
            list(self.database.find_values(self.valuespec, "file1")), []
        )
class Database_find_values__populated(_NoSQLOpen):
    """find_values() bound combinations against five stored index values."""

    def setUp(self):
        super().setUp()
        self.valuespec = ValuesClause()
        self.valuespec.field = "field1"
        index_tree = self.database.trees["file1_field1"]
        for value in ("c", "d", "dk", "e", "f"):
            index_tree.insert(value)

    def _found(self):
        # Materialize the generator returned by find_values().
        return list(self.database.find_values(self.valuespec, "file1"))

    def test_01_find_values(self):
        # Exclusive at both ends.
        self.valuespec.above_value = "d"
        self.valuespec.below_value = "e"
        self.assertEqual(self._found(), ["dk"])

    def test_02_find_values(self):
        # Exclusive start, inclusive end.
        self.valuespec.above_value = "d"
        self.valuespec.to_value = "e"
        self.assertEqual(self._found(), ["dk", "e"])

    def test_03_find_values(self):
        # Inclusive at both ends.
        self.valuespec.from_value = "d"
        self.valuespec.to_value = "e"
        self.assertEqual(self._found(), ["d", "dk", "e"])

    def test_04_find_values(self):
        # Inclusive start, exclusive end.
        self.valuespec.from_value = "d"
        self.valuespec.below_value = "e"
        self.assertEqual(self._found(), ["d", "dk"])

    def test_05_find_values(self):
        # Exclusive lower bound only.
        self.valuespec.above_value = "d"
        self.assertEqual(self._found(), ["dk", "e", "f"])

    def test_06_find_values(self):
        # Inclusive lower bound only.
        self.valuespec.from_value = "d"
        self.assertEqual(self._found(), ["d", "dk", "e", "f"])

    def test_07_find_values(self):
        # Inclusive upper bound only.
        self.valuespec.to_value = "e"
        self.assertEqual(self._found(), ["c", "d", "dk", "e"])

    def test_08_find_values(self):
        # Exclusive upper bound only.
        self.valuespec.below_value = "e"
        self.assertEqual(self._found(), ["c", "d", "dk"])

    def test_09_find_values(self):
        # No bounds: every stored value in order.
        self.assertEqual(self._found(), ["c", "d", "dk", "e", "f"])
class Database_add_record_to_field_value(_NoSQLOpen):
    """Verify index growth from single record to list to bitmap."""

    def test_01(self):
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    # Raw string: '\(' is an invalid escape sequence in a
                    # plain string literal (SyntaxWarning in Python 3.12).
                    r"add_record_to_field_value\(\) missing 5 required ",
                    "positional arguments: 'file', 'field', 'key', 'segment', ",
                    "and 'record_number'",
                )
            ),
            self.database.add_record_to_field_value,
        )

    def test_02__assumptions(self):
        # Nothing exists yet, but tree is available for (file1, field1) only.
        db = self.database.dbenv
        self.assertEqual(db.exists("1_1_0_indexvalue"), False)
        self.assertEqual(db.exists("1_1_1_2_indexvalue"), False)
        self.assertEqual(db.exists("1_1"), False)  # tree root
        self.assertEqual(db.exists("1_1_2_0"), False)  # a node
        self.assertEqual("file1_field1" in self.database.trees, True)
        self.assertEqual(db.exists("2_1_0_indexvalue"), False)
        self.assertEqual(db.exists("2_1_1_2_indexvalue"), False)
        self.assertEqual(db.exists("2_1"), False)  # tree root
        self.assertEqual(db.exists("2_1_2_0"), False)  # a node
        self.assertEqual("file2_field1" in self.database.trees, False)
        self.assertEqual(
            self.database.specification["file2"]["fields"]["Field2"][
                "access_method"
            ],
            "hash",
        )
        self.assertEqual(
            self.database.specification["file1"]["fields"]["Field1"][
                "access_method"
            ],
            "btree",
        )

    def test_03_add_record_to_tree_field_value(self):
        db = self.database.dbenv
        # First reference for a segment is held inline as (record, count).
        self.database.add_record_to_field_value(
            "file1", "field1", "indexvalue", 2, 0
        )
        self.assertEqual(db.exists("1_1"), True)
        self.assertEqual(db.exists("1_1_0_indexvalue"), True)
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()), {2: (0, 1)}
        )
        self.database.add_record_to_field_value(
            "file1", "field1", "indexvalue", 3, 5
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: (5, 1)},
        )
        # Adding the same record again changes nothing.
        self.database.add_record_to_field_value(
            "file1", "field1", "indexvalue", 3, 5
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: (5, 1)},
        )
        self.assertEqual(db.exists("1_1_1_3_indexvalue"), False)
        # A second record converts the inline reference to a list ('L').
        self.database.add_record_to_field_value(
            "file1", "field1", "indexvalue", 3, 6
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("L", 2)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x00\x05\x00\x06",
        )
        self.database.add_record_to_field_value(
            "file1", "field1", "indexvalue", 3, 2
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("L", 3)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x00\x02\x00\x05\x00\x06",
        )
        for i in 10, 20, 30, 40:
            self.database.add_record_to_field_value(
                "file1", "field1", "indexvalue", 3, i
            )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("L", 7)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x00\x02\x00\x05\x00\x06\x00\x0a\x00\x14\x00\x1e\x00\x28",
        )
        # The eighth record converts the list to a bitmap ('B').
        self.database.add_record_to_field_value(
            "file1", "field1", "indexvalue", 3, 50
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("B", 8)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x26\x20\x08\x02\x00\x80\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        # Adding the same record to a bitmap changes nothing.
        self.database.add_record_to_field_value(
            "file1", "field1", "indexvalue", 3, 50
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("B", 8)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x26\x20\x08\x02\x00\x80\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        self.database.add_record_to_field_value(
            "file1", "field1", "indexvalue", 3, 51
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("B", 9)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x26\x20\x08\x02\x00\x80\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )

    def test_04_add_record_to_hash_field_value(self):
        db = self.database.dbenv
        self.database.add_record_to_field_value(
            "file2", "field2", "indexvalue", 2, 0
        )
        self.assertEqual(db.exists("2_1"), False)  # This record never exists.
        self.assertEqual(db.exists("2_1_0_indexvalue"), True)
        self.assertEqual(
            literal_eval(db["2_1_0_indexvalue"].decode()), {2: (0, 1)}
        )
class Database_remove_record_from_field_value(_NoSQLOpen):
    """Verify index shrinkage from bitmap to list to single record."""

    def test_01(self):
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    # Raw string: '\(' is an invalid escape sequence in a
                    # plain string literal (SyntaxWarning in Python 3.12).
                    r"remove_record_from_field_value\(\) missing 5 required ",
                    "positional arguments: 'file', 'field', 'key', 'segment', ",
                    "and 'record_number'",
                )
            ),
            self.database.remove_record_from_field_value,
        )

    def test_02_remove_record_from_tree_field_value(self):
        db = self.database.dbenv
        # Build a bitmap reference with nine records in segment 3 and
        # one inline record in segment 2.
        for i in 5, 6, 2, 10, 20, 30, 40, 50, 51:
            self.database.add_record_to_field_value(
                "file1", "field1", "indexvalue", 3, i
            )
        self.database.add_record_to_field_value(
            "file1", "field1", "indexvalue", 2, 0
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("B", 9)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x26\x20\x08\x02\x00\x80\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        # Removing from an unknown segment changes nothing.
        self.database.remove_record_from_field_value(
            "file1", "field1", "indexvalue", 4, 40
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("B", 9)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x26\x20\x08\x02\x00\x80\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        self.database.remove_record_from_field_value(
            "file1", "field1", "indexvalue", 3, 40
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("B", 8)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x26\x20\x08\x02\x00\x00\x30\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        for i in 50, 51, 20:
            self.database.remove_record_from_field_value(
                "file1", "field1", "indexvalue", 3, i
            )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("B", 5)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x26\x20\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        # Dropping to four records converts the bitmap back to a list.
        self.database.remove_record_from_field_value(
            "file1", "field1", "indexvalue", 3, 10
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("L", 4)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x00\x02\x00\x05\x00\x06\x00\x1e",
        )
        for i in 2, 6:
            self.database.remove_record_from_field_value(
                "file1", "field1", "indexvalue", 3, i
            )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: ("L", 2)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_3_indexvalue"].decode()),
            b"\x00\x05\x00\x1e",
        )
        # Dropping to one record converts the list to an inline reference
        # and deletes the list record.
        self.database.remove_record_from_field_value(
            "file1", "field1", "indexvalue", 3, 5
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()),
            {2: (0, 1), 3: (30, 1)},
        )
        self.assertEqual(db.exists("1_1_1_3_indexvalue"), False)
        # Removing segment 3's last record drops the segment entry.
        self.database.remove_record_from_field_value(
            "file1", "field1", "indexvalue", 3, 30
        )
        self.assertEqual(
            literal_eval(db["1_1_0_indexvalue"].decode()), {2: (0, 1)}
        )
        self.assertEqual(db.exists("1_1_1_3_indexvalue"), False)
        self.assertEqual(db.exists("1_1_1_2_indexvalue"), False)
        self.assertEqual(db.exists("1_1"), True)
        # Removing the last record for the key deletes the key and the tree.
        self.database.remove_record_from_field_value(
            "file1", "field1", "indexvalue", 2, 0
        )
        self.assertEqual(db.exists("1_1_0_indexvalue"), False)
        self.assertEqual(db.exists("1_1_1_3_indexvalue"), False)
        self.assertEqual(db.exists("1_1_1_2_indexvalue"), False)
        self.assertEqual(db.exists("1_1"), False)

    def test_03_remove_record_from_hash_field_value(self):
        db = self.database.dbenv
        self.database.add_record_to_field_value(
            "file2", "field2", "indexvalue", 2, 0
        )
        self.assertEqual(db.exists("2_1"), False)  # This record never exists.
        self.assertEqual(db.exists("2_1_0_indexvalue"), True)
        self.assertEqual(
            literal_eval(db["2_1_0_indexvalue"].decode()), {2: (0, 1)}
        )
        self.database.remove_record_from_field_value(
            "file2", "field2", "indexvalue", 2, 0
        )
        self.assertEqual(db.exists("2_1"), False)
        self.assertEqual(db.exists("2_1_0_indexvalue"), False)
class Database_populate_segment(_NoSQLOpen):
    """populate_segment() builds the segment class matching the reference."""

    def test_01(self):
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    # Raw string: '\(' is an invalid escape sequence in a
                    # plain string literal (SyntaxWarning in Python 3.12).
                    r"populate_segment\(\) missing 3 required ",
                    "positional arguments: ",
                    "'segment_number', 'segment_reference', and 'file'",
                )
            ),
            self.database.populate_segment,
        )

    def test_02_populate_segment(self):
        # An int reference is a single record number.
        s = self.database.populate_segment(2, 3, "file1")
        self.assertIsInstance(s, recordset.RecordsetSegmentInt)

    def test_04_populate_segment(self):
        # A short bytes reference is a list of two-byte record numbers.
        s = self.database.populate_segment(2, b"\x00\x40\x00\x41", "file1")
        self.assertIsInstance(s, recordset.RecordsetSegmentList)
        self.assertEqual(s.count_records(), 2)

    def test_06_populate_segment(self):
        # A segment-sized bytes reference is a bitmap.
        s = self.database.populate_segment(
            0,
            b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00",
            "file1",
        )
        self.assertIsInstance(s, recordset.RecordsetSegmentBitarray)
        self.assertEqual(s.count_records(), 24)
class _NoSQLOpenPopulated(_NoSQLOpen):
    """Populate file1_field1 with bitmap, list, and single-record keys."""

    def setUp(self):
        super().setUp()
        # Seven bitmap segment images (first has 32 bits set, rest 24)
        # followed by two record-number lists (two-byte big-endian numbers).
        segments = (
            b"\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
            b"\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
            b"\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00",
            b"\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00",
            b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00",
            b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00",
            b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff",
            b"\x00\x40\x00\x41",
            b"\x00\x42\x00\x43\x00\x44",
        )
        keys = (
            "a_o",
            "aa_o",
            "ba_o",
            "bb_o",
            "c_o",
            "cep",
            "deq",
        )
        db = self.database.dbenv
        # '1_1_1_<segment>_<key>' holds the segment's record references;
        # '1_1_0_<key>' appears to map segment to (reference, count) —
        # 'B' for bitmap, 'L' for list, or an inline record number.
        for e, k in enumerate(keys):
            self.database.trees["file1_field1"].insert(k)
            db["1_1_1_0_" + k] = repr(segments[e])
            db["1_1_0_" + k] = repr({0: ("B", 24 if e else 32)})
        self.database.trees["file1_field1"].insert("tww")
        db["1_1_1_0_" + "tww"] = repr(segments[7])
        db["1_1_0_" + "tww"] = repr({0: ("L", 2)})
        self.database.trees["file1_field1"].insert("twy")
        db["1_1_1_0_" + "twy"] = repr(segments[8])
        db["1_1_0_" + "twy"] = repr({0: ("L", 3)})
        # Keys with one record store the record number inline: no
        # '1_1_1_0_<key>' record is written for 'one' and 'nin'.
        self.database.trees["file1_field1"].insert("one")
        db["1_1_0_" + "one"] = repr({0: (50, 1)})
        self.database.trees["file1_field1"].insert("nin")
        db["1_1_0_" + "nin"] = repr({0: (100, 1)})
        # 'www' spans two segments, each holding the same list.
        self.database.trees["file1_field1"].insert("www")
        db["1_1_1_0_" + "www"] = repr(segments[8])
        db["1_1_1_1_" + "www"] = repr(segments[8])
        db["1_1_0_" + "www"] = repr({0: ("L", 3), 1: ("L", 3)})
class Database_make_recordset(_NoSQLOpenPopulated):
    """Test the recordlist_* methods against the populated database.

    'file1_field1' is ordered (has an index tree); 'file2_field2' is not,
    so the methods which need an ordered field raise DatabaseError there.
    """

    def test_01(self):
        """The recordlist_* methods reject excess positional arguments."""
        # Raw strings are required: '\(' is an invalid escape sequence in
        # a plain string literal (SyntaxWarning from Python 3.12).
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"recordlist_key_like\(\) takes from 3 to 5 ",
                    r"positional arguments but 6 were given",
                )
            ),
            self.database.recordlist_key_like,
            *(None, None, None, None, None),
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"recordlist_key\(\) takes from 3 to 5 ",
                    r"positional arguments but 6 were given",
                )
            ),
            self.database.recordlist_key,
            *(None, None, None, None, None),
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"recordlist_key_startswith\(\) takes from 3 to 5 ",
                    r"positional arguments but 6 were given",
                )
            ),
            self.database.recordlist_key_startswith,
            *(None, None, None, None, None),
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"recordlist_key_range\(\) takes from 3 to 8 ",
                    r"positional arguments but 9 were given",
                )
            ),
            self.database.recordlist_key_range,
            *(None, None, None, None, None, None, None, None),
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"recordlist_all\(\) takes from 3 to 4 ",
                    r"positional arguments but 5 were given",
                )
            ),
            self.database.recordlist_all,
            *(None, None, None, None),
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"recordlist_nil\(\) takes from 2 to 3 ",
                    r"positional arguments but 4 were given",
                )
            ),
            self.database.recordlist_nil,
            *(None, None, None),
        )

    def test_02_make_recordset_key_like(self):
        """recordlist_key_like needs an ordered field."""
        self.assertRaisesRegex(
            _nosql.DatabaseError,
            "'field2' field in 'file2' file is not ordered",
            self.database.recordlist_key_like,
            *("file2", "field2"),
        )

    def test_03_make_recordset_key_like(self):
        rs = self.database.recordlist_key_like("file1", "field1")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 0)

    def test_04_make_recordset_key_like(self):
        rs = self.database.recordlist_key_like("file1", "field1", keylike="z")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 0)

    def test_05_make_recordset_key_like(self):
        # 'n' matches 'nin' and 'one': two single records in segment 0.
        rs = self.database.recordlist_key_like("file1", "field1", keylike="n")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 2)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_06_make_recordset_key_like(self):
        # 'w' matches 'tww', 'twy' and 'www'.
        rs = self.database.recordlist_key_like("file1", "field1", keylike="w")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 2)
        self.assertEqual(rs[0].count_records(), 5)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_07_make_recordset_key_like(self):
        rs = self.database.recordlist_key_like("file1", "field1", keylike="e")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 41)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_08_make_recordset_key(self):
        # recordlist_key works on unordered fields too.
        rs = self.database.recordlist_key("file2", "field2")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 0)

    def test_09_make_recordset_key(self):
        rs = self.database.recordlist_key("file1", "field1")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 0)

    def test_10_make_recordset_key(self):
        rs = self.database.recordlist_key("file1", "field1", key="one")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 1)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentInt)

    def test_11_make_recordset_key(self):
        rs = self.database.recordlist_key("file1", "field1", key="tww")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 2)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentList)

    def test_12_make_recordset_key(self):
        rs = self.database.recordlist_key("file1", "field1", key="a_o")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 32)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_13_make_recordset_key_startswith(self):
        """recordlist_key_startswith needs an ordered field."""
        self.assertRaisesRegex(
            _nosql.DatabaseError,
            "'field2' field in 'file2' file is not ordered",
            self.database.recordlist_key_startswith,
            *("file2", "field2"),
        )

    def test_14_make_recordset_key_startswith(self):
        rs = self.database.recordlist_key_startswith("file1", "field1")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 0)

    def test_15_make_recordset_key_startswith(self):
        rs = self.database.recordlist_key_startswith(
            "file1", "field1", keystart="ppp"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 0)

    def test_16_make_recordset_key_startswith(self):
        rs = self.database.recordlist_key_startswith(
            "file1", "field1", keystart="o"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(rs[0].count_records(), 1)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentInt)

    def test_17_make_recordset_key_startswith(self):
        rs = self.database.recordlist_key_startswith(
            "file1", "field1", keystart="tw"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(rs[0].count_records(), 5)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_18_make_recordset_key_startswith(self):
        rs = self.database.recordlist_key_startswith(
            "file1", "field1", keystart="d"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(rs[0].count_records(), 24)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_19_make_recordset_key_range(self):
        """recordlist_key_range needs an ordered field."""
        self.assertRaisesRegex(
            _nosql.DatabaseError,
            "'field2' field in 'file2' file is not ordered",
            self.database.recordlist_key_range,
            *("file2", "field2"),
        )

    def test_20_make_recordset_key_range(self):
        # No bounds: everything, 128 records in segment 0.
        rs = self.database.recordlist_key_range("file1", "field1")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 2)
        self.assertEqual(rs[0].count_records(), 128)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_21_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range(
            "file1", "field1", ge="ppp", le="qqq"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 0)

    def test_22_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range(
            "file1", "field1", ge="n", le="q"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 2)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_23_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range(
            "file1", "field1", ge="t", le="tz"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 5)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_24_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range(
            "file1", "field1", ge="c", le="cz"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 40)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_25_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range("file1", "field1", ge="c")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 2)
        self.assertEqual(rs[0].count_records(), 62)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_26_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range("file1", "field1", le="cz")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 112)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_27_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range(
            "file1", "field1", ge="ppp", lt="qqq"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 0)

    def test_28_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range(
            "file1", "field1", gt="ppp", lt="qqq"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 0)

    def test_29_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range(
            "file1", "field1", gt="n", le="q"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 2)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_30_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range(
            "file1", "field1", gt="t", le="tz"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 5)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_31_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range(
            "file1", "field1", gt="c", lt="cz"
        )
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 40)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_32_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range("file1", "field1", gt="c")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 2)
        self.assertEqual(rs[0].count_records(), 62)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_33_make_recordset_key_range(self):
        rs = self.database.recordlist_key_range("file1", "field1", lt="cz")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 1)
        self.assertEqual(rs[0].count_records(), 112)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_34_make_recordset_all(self):
        """recordlist_all needs an ordered field."""
        self.assertRaisesRegex(
            _nosql.DatabaseError,
            "'field2' field in 'file2' file is not ordered",
            self.database.recordlist_all,
            *("file2", "field2"),
        )

    def test_35_make_recordset_all(self):
        rs = self.database.recordlist_all("file1", "field1")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 2)
        self.assertEqual(rs[0].count_records(), 128)
        self.assertIsInstance(rs[0], recordset.RecordsetSegmentBitarray)

    def test_36_make_recordset_nil(self):
        rs = self.database.recordlist_nil("file1")
        self.assertIsInstance(rs, recordset.RecordList)
        self.assertEqual(len(rs), 0)
class Database_file_unfile_records(_NoSQLOpenPopulated):
    """Test file_records_under and unfile_records_under.

    The tests inspect the dbenv key-value store directly: '1_1_0_<key>'
    holds the per-segment reference dict and '1_1_1_<segment>_<key>'
    holds the segment records.
    """

    def test_01(self):
        """Both methods require their positional arguments."""
        # Raw strings are required: '\(' is an invalid escape sequence in
        # a plain string literal (SyntaxWarning from Python 3.12).
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"unfile_records_under\(\) missing 3 required ",
                    r"positional arguments: 'file', 'field', and 'key'",
                )
            ),
            self.database.unfile_records_under,
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"file_records_under\(\) missing 4 required positional ",
                    r"arguments: 'file', 'field', 'recordset', and 'key'",
                )
            ),
            self.database.file_records_under,
        )

    def test_02_unfile_records_under(self):
        """Unfiling an existing key removes its records and tree entry."""
        db = self.database.dbenv
        self.assertEqual(
            "aa_o"
            in self.database.trees["file1_field1"].search("aa_o")[-1].node[4],
            True,
        )
        self.assertEqual(db.exists("1_1_0_aa_o"), True)
        self.assertEqual(db.exists("1_1_1_0_aa_o"), True)
        self.database.unfile_records_under("file1", "field1", "aa_o")
        self.assertEqual(db.exists("1_1_0_aa_o"), False)
        self.assertEqual(db.exists("1_1_1_0_aa_o"), False)
        self.assertEqual(
            "aa_o"
            in self.database.trees["file1_field1"].search("aa_o")[-1].node[4],
            False,
        )

    def test_03_unfile_records_under(self):
        """Unfiling a missing key is a no-op."""
        db = self.database.dbenv
        self.assertEqual(
            "kkkk"
            in self.database.trees["file1_field1"].search("aa_o")[-1].node[4],
            False,
        )
        self.assertEqual(db.exists("1_1_0_kkkk"), False)
        self.database.unfile_records_under("file1", "field1", "kkkk")
        self.assertEqual(db.exists("1_1_0_kkkk"), False)
        self.assertEqual(
            "kkkk"
            in self.database.trees["file1_field1"].search("aa_o")[-1].node[4],
            False,
        )

    def test_04_file_records_under(self):
        """Filing all records under an existing key replaces its segments."""
        db = self.database.dbenv
        rs = self.database.recordlist_all("file1", "field1")
        self.assertEqual(
            literal_eval(db["1_1_0_aa_o"].decode()), {0: ("B", 24)}
        )
        self.assertEqual(
            literal_eval(db["1_1_1_0_aa_o"].decode()),
            b"\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        self.database.file_records_under("file1", "field1", rs, "aa_o")
        self.assertEqual(
            literal_eval(db["1_1_0_aa_o"].decode()),
            {0: ("B", 128), 1: ("L", 3)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_0_aa_o"].decode()),
            b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
        )
        self.assertEqual(
            literal_eval(db["1_1_1_1_aa_o"].decode()), b"\x00B\x00C\x00D"
        )

    def test_05_file_records_under(self):
        """Filing all records under a new key creates its segments."""
        db = self.database.dbenv
        self.assertEqual(db.exists("1_1_0_rrr"), False)
        rs = self.database.recordlist_all("file1", "field1")
        self.database.file_records_under("file1", "field1", rs, "rrr")
        self.assertEqual(
            literal_eval(db["1_1_0_rrr"].decode()),
            {0: ("B", 128), 1: ("L", 3)},
        )
        self.assertEqual(
            literal_eval(db["1_1_1_0_rrr"].decode()),
            b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
        )
        self.assertEqual(
            literal_eval(db["1_1_1_1_rrr"].decode()), b"\x00B\x00C\x00D"
        )

    def test_06_file_records_under(self):
        """Filing a list recordset replaces a bitmap reference with a list."""
        db = self.database.dbenv
        self.assertEqual(literal_eval(db["1_1_0_twy"].decode()), {0: ("L", 3)})
        self.assertEqual(
            literal_eval(db["1_1_1_0_twy"].decode()), b"\x00B\x00C\x00D"
        )
        self.assertEqual(
            literal_eval(db["1_1_0_aa_o"].decode()), {0: ("B", 24)}
        )
        self.assertEqual(
            literal_eval(db["1_1_1_0_aa_o"].decode()),
            b"\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        rs = self.database.recordlist_key("file1", "field1", key="twy")
        self.database.file_records_under("file1", "field1", rs, "aa_o")
        self.assertEqual(literal_eval(db["1_1_0_twy"].decode()), {0: ("L", 3)})
        self.assertEqual(
            literal_eval(db["1_1_1_0_twy"].decode()), b"\x00B\x00C\x00D"
        )
        self.assertEqual(
            literal_eval(db["1_1_0_aa_o"].decode()), {0: ("L", 3)}
        )
        self.assertEqual(
            literal_eval(db["1_1_1_0_aa_o"].decode()), b"\x00B\x00C\x00D"
        )

    def test_07_file_records_under(self):
        """Filing a list recordset under a new key copies the list."""
        db = self.database.dbenv
        self.assertEqual(literal_eval(db["1_1_0_twy"].decode()), {0: ("L", 3)})
        self.assertEqual(
            literal_eval(db["1_1_1_0_twy"].decode()), b"\x00B\x00C\x00D"
        )
        rs = self.database.recordlist_key("file1", "field1", key="twy")
        self.assertEqual(db.exists("1_1_0_rrr"), False)
        self.database.file_records_under("file1", "field1", rs, "rrr")
        self.assertEqual(literal_eval(db["1_1_0_twy"].decode()), {0: ("L", 3)})
        self.assertEqual(
            literal_eval(db["1_1_1_0_twy"].decode()), b"\x00B\x00C\x00D"
        )
        self.assertEqual(literal_eval(db["1_1_0_rrr"].decode()), {0: ("L", 3)})
        self.assertEqual(
            literal_eval(db["1_1_1_0_rrr"].decode()), b"\x00B\x00C\x00D"
        )

    def test_08_file_records_under(self):
        """A one-record recordset replaces a bitmap with an inline record."""
        db = self.database.dbenv
        self.assertEqual(literal_eval(db["1_1_0_one"].decode()), {0: (50, 1)})
        self.assertEqual(
            literal_eval(db["1_1_0_aa_o"].decode()), {0: ("B", 24)}
        )
        self.assertEqual(
            literal_eval(db["1_1_1_0_aa_o"].decode()),
            b"\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        rs = self.database.recordlist_key("file1", "field1", key="one")
        self.database.file_records_under("file1", "field1", rs, "aa_o")
        self.assertEqual(literal_eval(db["1_1_0_one"].decode()), {0: (50, 1)})
        self.assertEqual(literal_eval(db["1_1_0_aa_o"].decode()), {0: (50, 1)})
        self.assertEqual(db.exists("1_1_1_0_aa_o"), False)

    def test_09_file_records_under(self):
        """A one-record recordset filed under a new key is stored inline."""
        db = self.database.dbenv
        self.assertEqual(literal_eval(db["1_1_0_one"].decode()), {0: (50, 1)})
        self.assertEqual(db.exists("1_1_0_rrr"), False)
        rs = self.database.recordlist_key("file1", "field1", key="one")
        self.database.file_records_under("file1", "field1", rs, "rrr")
        self.assertEqual(literal_eval(db["1_1_0_one"].decode()), {0: (50, 1)})
        self.assertEqual(literal_eval(db["1_1_0_rrr"].decode()), {0: (50, 1)})

    def test_10_file_records_under(self):
        """Filing removes segments the new recordset does not reference."""
        db = self.database.dbenv
        self.assertEqual(
            literal_eval(db["1_1_0_ba_o"].decode()), {0: ("B", 24)}
        )
        self.assertEqual(
            literal_eval(db["1_1_1_0_ba_o"].decode()),
            b"\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        self.assertEqual(
            literal_eval(db["1_1_0_www"].decode()), {0: ("L", 3), 1: ("L", 3)}
        )
        self.assertEqual(
            literal_eval(db["1_1_1_0_www"].decode()), b"\x00B\x00C\x00D"
        )
        self.assertEqual(
            literal_eval(db["1_1_1_1_www"].decode()), b"\x00B\x00C\x00D"
        )
        rs = self.database.recordlist_key("file1", "field1", key="ba_o")
        self.database.file_records_under("file1", "field1", rs, "www")
        self.assertEqual(
            literal_eval(db["1_1_0_ba_o"].decode()), {0: ("B", 24)}
        )
        self.assertEqual(
            literal_eval(db["1_1_1_0_ba_o"].decode()),
            b"\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        self.assertEqual(
            literal_eval(db["1_1_0_www"].decode()), {0: ("B", 24)}
        )
        self.assertEqual(
            literal_eval(db["1_1_1_0_www"].decode()),
            b"\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        self.assertEqual(db.exists("1_1_1_1_www"), False)
class Database_database_create_cursors(_NoSQLOpen):
    """Test database_cursor and create_recordset_cursor."""

    def test_01(self):
        """Both methods validate their argument counts."""
        # Raw strings are required: '\(' is an invalid escape sequence in
        # a plain string literal (SyntaxWarning from Python 3.12).
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"database_cursor\(\) takes from 3 to 4 ",
                    r"positional arguments but 5 were given",
                )
            ),
            self.database.database_cursor,
            *(None, None, None, None),
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"create_recordset_cursor\(\) missing 1 required ",
                    r"positional argument: 'recordset'",
                )
            ),
            self.database.create_recordset_cursor,
        )

    def test_02_database_cursor_primary(self):
        self.assertIsInstance(
            self.database.database_cursor("file1", "file1"),
            _nosql.CursorPrimary,
        )

    def test_03_database_cursor_secondary_tree(self):
        self.assertIsInstance(
            self.database.database_cursor("file1", "field1"),
            _nosql.CursorSecondary,
        )

    def test_04_database_cursor_secondary_hash(self):
        """A cursor cannot be made on an unordered field."""
        self.assertRaisesRegex(
            _nosql.DatabaseError,
            "'field2' field in 'file2' file is not ordered",
            self.database.database_cursor,
            *("file2", "field2"),
        )

    def test_05_create_recordset_cursor(self):
        d = self.database
        rs = d.recordlist_key("file1", "field1", key="ba_o")
        self.assertIsInstance(
            d.create_recordset_cursor(rs), recordset.RecordsetCursor
        )
class Database_freed_record_number(_NoSQLOpen):
    """Test tracking and reuse of freed record numbers in 'file1'.

    setUp creates three full segments of records (3 * segment size) so
    deletions leave gaps in known segments.
    """

    def setUp(self):
        super().setUp()
        for i in range(SegmentSize.db_segment_size * 3):
            self.database.dbenv["_".join(("1_0", str(i)))] = repr(
                "_".join((str(i), "value"))
            )
            self.database.add_record_to_ebm("file1", i)
        self.high_record = self.database.get_high_record("file1")
        # Number of complete segments below the high record.
        self.database.ebm_control["file1"].segment_count = (
            self.high_record[0] // SegmentSize.db_segment_size
        )

    def test_01(self):
        """Both methods require their positional arguments."""
        # Raw strings are required: '\(' is an invalid escape sequence in
        # a plain string literal (SyntaxWarning from Python 3.12).
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"get_lowest_freed_record_number\(\) missing 1 required ",
                    r"positional argument: 'dbset'",
                )
            ),
            self.database.get_lowest_freed_record_number,
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"note_freed_record_number_segment\(\) missing 4 required ",
                    r"positional arguments: 'dbset', 'segment', ",
                    r"'record_number_in_segment', and 'high_record'",
                )
            ),
            self.database.note_freed_record_number_segment,
        )

    def test_02_note_freed_record_number_segment(self):
        """Freed record numbers register their segments exactly once."""
        self.assertEqual(
            self.database.ebm_control["file1"].freed_record_number_pages, None
        )
        for i in (
            100,
            101,
            200,
            300,
        ):
            self.database.delete("file1", i, repr("_".join((str(i), "value"))))
            sn, rn = self.database.remove_record_from_ebm("file1", i)
            self.database.note_freed_record_number_segment(
                "file1", sn, rn, self.high_record
            )
        self.assertEqual(
            self.database.ebm_control["file1"].freed_record_number_pages,
            [0, 1, 2],
        )
        self.database.ebm_control["file1"].freed_record_number_pages = None
        self.assertEqual(
            self.database.ebm_control["file1"].freed_record_number_pages, None
        )
        # The page list is reloaded, not rebuilt, when noting another free.
        for i in (201,):
            self.database.delete("file1", i, repr("_".join((str(i), "value"))))
            sn, rn = self.database.remove_record_from_ebm("file1", i)
            self.database.note_freed_record_number_segment(
                "file1", sn, rn, self.high_record
            )
        self.assertEqual(
            self.database.ebm_control["file1"].freed_record_number_pages,
            [0, 1, 2],
        )

    def test_03_get_lowest_freed_record_number(self):
        """No freed record numbers gives None."""
        rn = self.database.get_lowest_freed_record_number("file1")
        self.assertEqual(rn, None)

    def test_04_get_lowest_freed_record_number(self):
        rn = None
        for i in (
            100,
            101,
            200,
            300,
        ):
            self.database.delete("file1", i, repr("_".join((str(i), "value"))))
            sn, rn = self.database.remove_record_from_ebm("file1", i)
            self.database.note_freed_record_number_segment(
                "file1", sn, rn, self.high_record
            )
        rn = self.database.get_lowest_freed_record_number("file1")
        self.assertEqual(rn, 100)

    def test_05_get_lowest_freed_record_number(self):
        # 380 is in the high (incomplete) segment so is not reusable yet.
        for i in (380,):
            self.database.delete("file1", i, repr("_".join((str(i), "value"))))
            sn, rn = self.database.remove_record_from_ebm("file1", i)
            self.database.note_freed_record_number_segment(
                "file1", sn, rn, self.high_record
            )
        rn = self.database.get_lowest_freed_record_number("file1")
        self.assertEqual(rn, None)

    def test_06_get_lowest_freed_record_number(self):
        for i in (110,):
            self.database.delete("file1", i, repr("_".join((str(i), "value"))))
            sn, rn = self.database.remove_record_from_ebm("file1", i)
            self.database.note_freed_record_number_segment(
                "file1", sn, rn, self.high_record
            )
        rn = self.database.get_lowest_freed_record_number("file1")
        self.assertEqual(rn, 110)

    # The freed record number in segment number 2, 'divmod(380, 128)', is not
    # seen until segment number 4 has records.
    # Segment 2 is not deleted from the 'freed record number' list until the
    # first search of the segment after all freed record numbers have been
    # re-used.
    def test_07_get_lowest_freed_record_number(self):
        self.assertEqual(
            self.database.ebm_control["file1"].freed_record_number_pages, None
        )
        for i in (380,):
            self.database.delete("file1", i, repr("_".join((str(i), "value"))))
            sn, rn = self.database.remove_record_from_ebm("file1", i)
            self.database.note_freed_record_number_segment(
                "file1", sn, rn, self.high_record
            )
        self.assertEqual(
            len(self.database.ebm_control["file1"].freed_record_number_pages),
            1,
        )
        rn = self.database.get_lowest_freed_record_number("file1")
        self.assertEqual(rn, None)
        # Fill the next segment so segment 2 is no longer the high segment.
        i = self.high_record[0]
        for i in range(i, i + 129):
            self.database.dbenv["_".join(("1_0", str(i)))] = repr(
                "_".join((str(i), "value"))
            )
            self.database.add_record_to_ebm("file1", i)
        self.assertEqual(
            len(self.database.ebm_control["file1"].freed_record_number_pages),
            1,
        )
        self.high_record = self.database.get_high_record("file1")
        self.database.ebm_control["file1"].segment_count = (
            self.high_record[0] // SegmentSize.db_segment_size
        )
        rn = self.database.get_lowest_freed_record_number("file1")
        self.assertEqual(rn, 380)
        self.assertEqual(
            len(self.database.ebm_control["file1"].freed_record_number_pages),
            1,
        )
        # Re-use the freed number; the page list empties on the next search.
        self.database.add_record_to_ebm("file1", 380)
        rn = self.database.get_lowest_freed_record_number("file1")
        self.assertEqual(rn, None)
        self.assertEqual(
            len(self.database.ebm_control["file1"].freed_record_number_pages),
            0,
        )
# Does this test add anything beyond Database_freed_record_number?
class Database_empty_freed_record_number(_NoSQLOpen):
    """Test note_freed_record_number_segment on a database with no records."""

    def setUp(self):
        super().setUp()
        self.high_record = self.database.get_high_record("file1")

    def test_01(self):
        """Noting a free on an empty database records nothing."""
        ebm_control = self.database.ebm_control["file1"]
        self.assertEqual(ebm_control.freed_record_number_pages, None)
        self.database.note_freed_record_number_segment(
            "file1", 0, 100, self.high_record
        )
        self.assertEqual(ebm_control.freed_record_number_pages, None)
        self.assertEqual(
            self.database.get_high_record("file1"), self.high_record
        )
class RecordsetCursor(_NoSQLOpen):
    """Test the _nosql.RecordsetCursor class.

    setUp writes 380 records, three full existence bitmap segments, and
    the 'a_o' index entries covering records in segments 0, 1, and 2.
    """

    def setUp(self):
        super().setUp()
        segments = (
            b"\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
            b"\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
            b"\x00\x00\x00\x00\x00\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00",
        )
        key = "a_o"
        for i in range(380):
            self.database.dbenv["_".join(("1", "0", str(i)))] = repr(
                str(i) + "Any value"
            )
        bits = b"\xff" + b"\xff" * (SegmentSize.db_segment_size_bytes - 1)
        self.database.dbenv["_".join(("1", "0", "_ebm", "0"))] = repr(bits)
        self.database.dbenv["_".join(("1", "0", "_ebm", "1"))] = repr(bits)
        self.database.dbenv["_".join(("1", "0", "_ebm", "2"))] = repr(bits)
        self.database.dbenv["_".join(("1", "0", "_ebm"))] = repr((0, 1, 2))
        for e, s in enumerate(segments):
            self.database.dbenv["_".join(("1", "1", "1", str(e), key))] = repr(
                s
            )
        self.database.dbenv["_".join(("1", "1", "0", key))] = repr(
            {0: "B", 1: "B", 2: "B"}
        )

    def test_01(self):
        """The constructor and _get_record require their arguments."""
        # Raw strings are required: '\(' is an invalid escape sequence in
        # a plain string literal (SyntaxWarning from Python 3.12).
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"__init__\(\) missing 2 required ",
                    r"positional arguments: 'recordset' and 'engine'",
                )
            ),
            _nosql.RecordsetCursor,
        )
        self.assertRaisesRegex(
            TypeError,
            "".join(
                (
                    r"_get_record\(\) missing 1 required ",
                    r"positional argument: 'record_number'",
                )
            ),
            _nosql.RecordsetCursor(None, None)._get_record,
        )

    def test_02___init__01(self):
        rc = _nosql.RecordsetCursor(None, True)
        self.assertEqual(rc.engine, True)

    def test_03___init__02(self):
        rs = self.database.recordlist_key("file1", "field1", key="a_o")
        rc = _nosql.RecordsetCursor(rs, self.database.dbenv)
        self.assertIs(rc.engine, self.database.dbenv)
        self.assertIs(rc._dbset, rs)

    def test_04__get_record(self):
        """_get_record returns (number, value) only for records in the set."""
        rc = _nosql.RecordsetCursor(
            self.database.recordlist_key("file1", "field1", key="a_o"),
            self.database.dbenv,
        )
        self.assertEqual(rc._get_record(4000), None)
        self.assertEqual(rc._get_record(120), None)
        self.assertEqual(rc._get_record(10), (10, "'10Any value'"))
        self.assertEqual(rc._get_record(155), (155, "'155Any value'"))
class ExistenceBitmapControl(_NoSQLOpen):
def setUp(self):
super().setUp()
def test_01(self):
self.assertRaisesRegex(
TypeError,
"".join(
(
"read_exists_segment\(\) missing 2 required ",
"positional arguments: 'segment_number' and 'dbenv'",
)
),
self.database.ebm_control["file1"].read_exists_segment,
)
self.assertRaisesRegex(
TypeError,
"".join(
(
"get_ebm_segment\(\) missing 2 required ",
"positional arguments: 'key' and 'dbenv'",
)
),
self.database.ebm_control["file1"].get_ebm_segment,
)
self.assertRaisesRegex(
TypeError,
"".join(
(
"delete_ebm_segment\(\) missing 2 required ",
"positional arguments: 'key' and 'dbenv'",
)
),
self.database.ebm_control["file1"].delete_ebm_segment,
)
self.assertRaisesRegex(
TypeError,
"".join(
(
"put_ebm_segment\(\) missing 3 required ",
"positional arguments: 'key', 'value', and 'dbenv'",
)
),
self.database.ebm_control["file1"].put_ebm_segment,
)
self.assertRaisesRegex(
TypeError,
"".join(
(
"append_ebm_segment\(\) missing 2 required ",
"positional arguments: 'value' and 'dbenv'",
)
),
self.database.ebm_control["file1"].append_ebm_segment,
)
def test_02_read_exists_segment_01(self):
self.assertEqual(self.database.ebm_control["file1"]._segment_count, 0)
self.assertEqual(
self.database.ebm_control["file1"].read_exists_segment(0, None),
None,
)
def test_03_read_exists_segment_02(self):
self.assertEqual(self.database.ebm_control["file1"]._segment_count, 0)
bits = b"\xff" + b"\xff" * (SegmentSize.db_segment_size_bytes - 1)
self.database.dbenv["_".join(("1", "0", "_ebm", "0"))] = repr(bits)
self.database.dbenv["_".join(("1", "0", "_ebm", "1"))] = repr(bits)
self.database.dbenv["_".join(("1", "0", "_ebm", "2"))] = repr(bits)
self.database.ebm_control["file1"]._segment_count = 3
self.database.ebm_control["file1"].table_ebm_segments = [0, 1, 2]
seg = self.database.ebm_control["file1"].read_exists_segment(
0, self.database.dbenv
)
self.assertEqual(seg.count(), 128)
seg = self.database.ebm_control["file1"].read_exists_segment(
1, self.database.dbenv
)
self.assertEqual(seg.count(), 128)
def test_04_get_ebm_segment_01(self):
sr = self.database.ebm_control["file1"].get_ebm_segment(
0, self.database.dbenv
)
self.assertEqual(sr, None)
def test_05_get_ebm_segment_02(self):
bits = b"\xff" + b"\xff" * (SegmentSize.db_segment_size_bytes - 1)
self.database.dbenv["_".join(("1", "0", "_ebm", "0"))] = repr(bits)
self.database.ebm_control["file1"].table_ebm_segments = [0]
sr = self.database.ebm_control["file1"].get_ebm_segment(
0, self.database.dbenv
)
self.assertEqual(sr, bits)
def test_06_delete_ebm_segment_01(self):
self.database.ebm_control["file1"].delete_ebm_segment(
0, self.database.dbenv
)
def test_07_delete_ebm_segment_02(self):
bits = b"\xff" + b"\xff" * (SegmentSize.db_segment_size_bytes - 1)
self.database.dbenv["_".join(("1", "0", "_ebm", "0"))] = repr(bits)
self.database.ebm_control["file1"].table_ebm_segments = [0]
self.database.ebm_control["file1"].delete_ebm_segment(
0, self.database.dbenv
)
def test_08_put_ebm_segment_01(self):
bits = b"\xff" + b"\xff" * (SegmentSize.db_segment_size_bytes - 1)
self.database.ebm_control["file1"].put_ebm_segment(
0, bits, self.database.dbenv
)
self.assertEqual(
"_".join(("1", "0", "_ebm", "0")) in self.database.dbenv, False
)
def test_09_put_ebm_segment_02(self):
bits = b"\xff" + b"\xff" * (SegmentSize.db_segment_size_bytes - 1)
self.database.ebm_control["file1"].table_ebm_segments = [0]
self.database.ebm_control["file1"].put_ebm_segment(
0, bits, self.database.dbenv
)
self.assertEqual(
self.database.dbenv["_".join(("1", "0", "_ebm", "0"))],
repr(bits).encode(),
)
def test_10_append_ebm_segment(self):
bits = b"\xff" + b"\xff" * (SegmentSize.db_segment_size_bytes - 1)
self.database.ebm_control["file1"].append_ebm_segment(
bits, self.database.dbenv
)
def test_11_set_high_record_number_01(self):
self.database.ebm_control["file1"].set_high_record_number(
self.database.dbenv
)
self.assertEqual(
self.database.ebm_control["file1"].high_record_number, -1
)
def test_12_set_high_record_number_02(self):
bits0 = b"\x00" + b"\x00" * (SegmentSize.db_segment_size_bytes - 1)
bits1 = b"\xff" + b"\xff" * (SegmentSize.db_segment_size_bytes - 1)
self.database.ebm_control["file1"].table_ebm_segments = [0, 1, 2]
self.database.ebm_control["file1"].put_ebm_segment(
0, bits0, self.database.dbenv
)
self.database.ebm_control["file1"].put_ebm_segment(
1, bits1, self.database.dbenv
)
self.database.ebm_control["file1"].put_ebm_segment(
2, bits1, self.database.dbenv
)
self.database.ebm_control["file1"].set_high_record_number(
self.database.dbenv
)
self.assertEqual(
self.database.ebm_control["file1"].high_record_number, 383
)
def test_13_set_high_record_number_03(self):
bits0 = b"\x00" + b"\x00" * (SegmentSize.db_segment_size_bytes - 1)
bits1 = b"\xff" + b"\xff" * (SegmentSize.db_segment_size_bytes - 1)
self.database.ebm_control["file1"].table_ebm_segments = [0, 1, 2]
self.database.ebm_control["file1"].put_ebm_segment(
0, bits0, self.database.dbenv
)
self.database.ebm_control["file1"].put_ebm_segment(
1, bits1, self.database.dbenv
)
self.database.ebm_control["file1"].put_ebm_segment(
2, bits0, self.database.dbenv
)
self.database.ebm_control["file1"].set_high_record_number(
self.database.dbenv
)
self.assertEqual(
self.database.ebm_control["file1"].high_record_number, 255
)
def test_14_set_high_record_number_04(self):
    """High record found inside a partially-populated middle segment."""
    size = SegmentSize.db_segment_size_bytes
    no_bits = b"\x00" * size
    first_byte_set = b"\xff" + b"\x00" * (size - 1)
    ebm = self.database.ebm_control["file1"]
    ebm.table_ebm_segments = [0, 1, 2]
    for segment, bitmap in enumerate((no_bits, first_byte_set, no_bits)):
        ebm.put_ebm_segment(segment, bitmap, self.database.dbenv)
    ebm.set_high_record_number(self.database.dbenv)
    # 135 is presumably bit 7 of segment 1 (128 + 7) with 128-bit
    # segments and high-bit-first numbering -- TODO confirm.
    self.assertEqual(ebm.high_record_number, 135)
if __name__ == "__main__":
    runner = unittest.TextTestRunner
    loader = unittest.defaultTestLoader.loadTestsFromTestCase
    # Run every suite once per available database engine module.
    test_cases = (
        Database___init__,
        Database_transaction_methods,
        DatabaseInstance,
        Database_open_database,
        Database_do_database_task,
        DatabaseTransactions,
        Database_put_replace_delete,
        Database_methods,
        Database_find_values__empty,
        Database_find_values__populated,
        Database_add_record_to_field_value,
        Database_remove_record_from_field_value,
        Database_populate_segment,
        Database_make_recordset,
        Database_file_unfile_records,
        Database_database_create_cursors,
        Database_freed_record_number,
        Database_empty_freed_record_number,
        RecordsetCursor,
        ExistenceBitmapControl,
    )
    for dbe_module in unqlite, vedis, ndbm_module, gnu_module:
        if dbe_module is None:
            continue
        for case in test_cases:
            runner().run(loader(case))
| 37.04958
| 81
| 0.576701
| 11,345
| 97,144
| 4.696959
| 0.050595
| 0.104266
| 0.039184
| 0.043012
| 0.832154
| 0.789742
| 0.739205
| 0.707302
| 0.66778
| 0.622347
| 0
| 0.044157
| 0.295973
| 97,144
| 2,621
| 82
| 37.063716
| 0.734984
| 0.043513
| 0
| 0.539628
| 0
| 0.014292
| 0.129248
| 0.03228
| 0
| 0
| 0
| 0
| 0.20615
| 1
| 0.083586
| false
| 0.001299
| 0.007362
| 0
| 0.103075
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
cce01ea010356dd637e67a19130e7684bac88e5d
| 216
|
py
|
Python
|
Python/Strings/StringValidators.py
|
devansh-pratap-singh/hackerrank-solutions
|
227817d90846424cd3078e60b225eb201e906cf9
|
[
"MIT"
] | 1
|
2020-10-15T14:03:52.000Z
|
2020-10-15T14:03:52.000Z
|
Python/Strings/StringValidators.py
|
devansh-pratap-singh/HackerRank-Solutions
|
227817d90846424cd3078e60b225eb201e906cf9
|
[
"MIT"
] | null | null | null |
Python/Strings/StringValidators.py
|
devansh-pratap-singh/HackerRank-Solutions
|
227817d90846424cd3078e60b225eb201e906cf9
|
[
"MIT"
] | null | null | null |
s = input()
# For each character class, report whether any character of s belongs to it.
for predicate in (str.isalnum, str.isalpha, str.isdigit, str.islower, str.isupper):
    print(any(predicate(ch) for ch in s))
| 36
| 40
| 0.699074
| 42
| 216
| 3.595238
| 0.285714
| 0.264901
| 0.397351
| 0.331126
| 0.582781
| 0.582781
| 0.582781
| 0
| 0
| 0
| 0
| 0
| 0.125
| 216
| 6
| 41
| 36
| 0.798942
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.833333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
cce59db25be0d14444c9bb54d6da1895fa630ad2
| 128
|
py
|
Python
|
python/8kyu/convert_a_string_to_number.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 3
|
2021-06-08T01:57:13.000Z
|
2021-06-26T10:52:47.000Z
|
python/8kyu/convert_a_string_to_number.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | null | null | null |
python/8kyu/convert_a_string_to_number.py
|
Sigmanificient/codewars
|
b34df4bf55460d312b7ddf121b46a707b549387a
|
[
"MIT"
] | 2
|
2021-06-10T21:20:13.000Z
|
2021-06-30T10:13:26.000Z
|
"""Kata url: https://www.codewars.com/kata/544675c6f971f7399a000e79."""
def string_to_number(s: str) -> int:
    """Convert the decimal string *s* to an integer.

    The parameter was annotated ``int`` but the function clearly takes a
    string; the annotation is corrected to ``str``.

    Raises:
        ValueError: if *s* is not a valid integer literal.
    """
    return int(s)
| 25.6
| 71
| 0.703125
| 18
| 128
| 4.888889
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168142
| 0.117188
| 128
| 4
| 72
| 32
| 0.610619
| 0.507813
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
693245eeabea01ffced189b7f7f81104b8993ecd
| 113
|
py
|
Python
|
plotify/__init__.py
|
seba-1511/plotify
|
18e2256ee8c05f804fdeaa8d8cd6729ab5fa34c9
|
[
"Apache-2.0"
] | 3
|
2019-07-30T21:29:52.000Z
|
2020-08-07T22:25:46.000Z
|
plotify/__init__.py
|
seba-1511/plotify
|
18e2256ee8c05f804fdeaa8d8cd6729ab5fa34c9
|
[
"Apache-2.0"
] | null | null | null |
plotify/__init__.py
|
seba-1511/plotify
|
18e2256ee8c05f804fdeaa8d8cd6729ab5fa34c9
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
from .plot import *
from .custom_plots import PublicationPlot, LowResPlot, ListContainer
| 22.6
| 68
| 0.79646
| 14
| 113
| 6.357143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01
| 0.115044
| 113
| 4
| 69
| 28.25
| 0.88
| 0.185841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
694ff1c78f244ae7e1118130701141e492316d0d
| 2,652
|
py
|
Python
|
localization/views.py
|
diegoalejogm/EnHuecoAPI
|
3d83d9fe07b5a14e9294cfdd675104d4c052af40
|
[
"MIT"
] | 2
|
2015-10-21T08:57:48.000Z
|
2016-02-21T21:33:17.000Z
|
localization/views.py
|
diegoalejogm/EnHuecoAPI
|
3d83d9fe07b5a14e9294cfdd675104d4c052af40
|
[
"MIT"
] | 1
|
2016-02-21T18:46:10.000Z
|
2016-02-21T18:46:10.000Z
|
localization/views.py
|
diegoalejogm/EnHuecoAPI
|
3d83d9fe07b5a14e9294cfdd675104d4c052af40
|
[
"MIT"
] | 1
|
2020-09-12T19:04:52.000Z
|
2020-09-12T19:04:52.000Z
|
from django.shortcuts import render
import datetime, sys
from rest_framework import viewsets, status
from rest_framework.response import Response
from localization.serializers import LocationSerializer
from users.serializers import UserLocationSerializer
from users.models import User
class LocationsViewSet(viewsets.ViewSet):
    """Read and update a user's location and list friends seen recently."""

    # A reported location is considered current for this long.
    _RECENT_WINDOW = datetime.timedelta(minutes=5)

    def _recent_friend_locations(self, pk):
        """Return serialized locations of *pk*'s friends reported recently.

        Raises User.DoesNotExist when no user has login == pk.
        """
        now = datetime.datetime.now()
        earlier = now - self._RECENT_WINDOW
        friends = (
            User.objects.get(login=pk)
            .friends.exclude(location__bssid="")
            .filter(location__bssid_date__range=(earlier, now))
        )
        return UserLocationSerializer(friends, many=True).data

    def list(self, request, pk):
        """List friends of *pk* whose location was reported in the window."""
        try:
            return Response(
                self._recent_friend_locations(pk), status=status.HTTP_200_OK
            )
        except User.DoesNotExist:
            return Response(status=status.HTTP_400_BAD_REQUEST)
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        except Exception:
            return Response(
                sys.exc_info()[0], status=status.HTTP_500_INTERNAL_SERVER_ERROR
            )

    def update(self, request, pk):
        """Partially update *pk*'s location from the request payload."""
        try:
            user = User.objects.get(login=pk)
            serializer = LocationSerializer(
                user.location, data=request.data, partial=True
            )
            if serializer.is_valid():
                serializer.save()
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(
                "Invalid Serializer", status=status.HTTP_400_BAD_REQUEST
            )
        except (User.DoesNotExist, ValueError) as e:
            # str(e): exception instances are not JSON-serializable as
            # Response data.
            return Response(str(e), status=status.HTTP_400_BAD_REQUEST)
        except Exception:
            return Response(
                sys.exc_info()[0], status=status.HTTP_500_INTERNAL_SERVER_ERROR
            )

    def updateWithFriendsList(self, request, pk):
        """Update *pk*'s location, then return the recent friend list."""
        try:
            user = User.objects.get(login=pk)
            serializer = LocationSerializer(
                user.location, data=request.data, partial=True
            )
            if serializer.is_valid():
                serializer.save()
                return Response(
                    self._recent_friend_locations(pk),
                    status=status.HTTP_200_OK,
                )
            return Response(
                "Invalid Serializer", status=status.HTTP_400_BAD_REQUEST
            )
        except User.DoesNotExist:
            return Response(
                "User does not exist", status=status.HTTP_400_BAD_REQUEST
            )
        except Exception:
            return Response(
                sys.exc_info()[0], status=status.HTTP_500_INTERNAL_SERVER_ERROR
            )
| 49.111111
| 96
| 0.672323
| 295
| 2,652
| 5.861017
| 0.257627
| 0.089069
| 0.101793
| 0.054945
| 0.772123
| 0.709659
| 0.709659
| 0.709659
| 0.709659
| 0.709659
| 0
| 0.018793
| 0.237557
| 2,652
| 53
| 97
| 50.037736
| 0.836301
| 0
| 0
| 0.653061
| 0
| 0
| 0.020739
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061224
| false
| 0
| 0.142857
| 0
| 0.44898
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
697150709ade74fce3512c21ab4e233e568cb034
| 240
|
py
|
Python
|
office365/directory/identities/conditional_access_root.py
|
theodoriss/Office365-REST-Python-Client
|
3bd7a62dadcd3f0a0aceeaff7584fff3fd44886e
|
[
"MIT"
] | 544
|
2016-08-04T17:10:16.000Z
|
2022-03-31T07:17:20.000Z
|
office365/directory/identities/conditional_access_root.py
|
theodoriss/Office365-REST-Python-Client
|
3bd7a62dadcd3f0a0aceeaff7584fff3fd44886e
|
[
"MIT"
] | 438
|
2016-10-11T12:24:22.000Z
|
2022-03-31T19:30:35.000Z
|
office365/directory/identities/conditional_access_root.py
|
theodoriss/Office365-REST-Python-Client
|
3bd7a62dadcd3f0a0aceeaff7584fff3fd44886e
|
[
"MIT"
] | 202
|
2016-08-22T19:29:40.000Z
|
2022-03-30T20:26:15.000Z
|
from office365.entity import Entity
class ConditionalAccessRoot(Entity):
    """Entry point for the Conditional Access (CA) object model.

    The conditionalAccessRoot resource does not contain any usable
    properties of its own.
    """
| 30
| 106
| 0.766667
| 30
| 240
| 6.133333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015075
| 0.170833
| 240
| 7
| 107
| 34.285714
| 0.909548
| 0.5875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 5
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.