function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def for_user(cls, user):
assert user.is_authenticated(), "user must be authenticated"
user_state, _ = cls.objects.get_or_create(user=user)
return user_state | pinax/pinax-lms-activities | [
7,
3,
7,
14,
1423412548
] |
def set(self, key, value):
self.data[key] = value
self.save() | pinax/pinax-lms-activities | [
7,
3,
7,
14,
1423412548
] |
def activity_class(self):
return load_path_attr(self.activity_class_path) | pinax/pinax-lms-activities | [
7,
3,
7,
14,
1423412548
] |
def in_progress(self):
return next(iter(self.sessions.filter(completed=None)), None) | pinax/pinax-lms-activities | [
7,
3,
7,
14,
1423412548
] |
def latest(self):
session, _ = self.sessions.get_or_create(completed=None)
return session | pinax/pinax-lms-activities | [
7,
3,
7,
14,
1423412548
] |
def last_completed(self):
return self.sessions.filter(completed__isnull=False).order_by("-started").first() | pinax/pinax-lms-activities | [
7,
3,
7,
14,
1423412548
] |
def all_sessions(self):
return self.sessions.order_by("started") | pinax/pinax-lms-activities | [
7,
3,
7,
14,
1423412548
] |
def state_for_user(cls, user, activity_key):
assert user.is_authenticated(), "user must be authenticated"
return cls.objects.filter(user=user, activity_key=activity_key).first() | pinax/pinax-lms-activities | [
7,
3,
7,
14,
1423412548
] |
def progression(self):
if self.in_progress:
return "continue"
elif self.activity_class.repeatable:
return "repeat"
else:
return "completed" | pinax/pinax-lms-activities | [
7,
3,
7,
14,
1423412548
] |
def mark_completed(self):
self.completed = timezone.now()
self.save()
self.activity_state.completed_count = models.F("completed_count") + 1
self.activity_state.save() | pinax/pinax-lms-activities | [
7,
3,
7,
14,
1423412548
] |
def __init__(self, alt_abbrevs=tuple(), structures=tuple(), figures=None, artiris=tuple()):
self.alt_abbrevs = alt_abbrevs
self.structures = structures
self.artiris = artiris | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def loadData(cls):
with open(cls.source, 'rt') as f:
lines = [l.rsplit('#')[0].strip() for l in f.readlines() if not l.startswith('#')]
return [l.rsplit(' ', 1) for l in lines] | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def processData(cls):
structRecs = []
out = {}
for structure, abrv in cls.raw:
structRecs.append((abrv, structure))
if abrv in out:
out[abrv][0].append(structure)
else:
out[abrv] = ([structure], ())
return structRecs, out | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def validate(cls, structRecs, out):
print(Counter(_[0] for _ in structRecs).most_common()[:5])
print(Counter(_[1] for _ in structRecs).most_common()[:5])
assert len(structRecs) == len([s for sl, _ in out.values() for s in sl]), 'There are non-unique abbreviations'
errata = {}
return out, errata | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def parseData(cls):
a, b = cls.raw.split('List of Structures')
if not a:
los, loa = b.split('List of Abbreviations')
else:
los = b
_, loa = a.split('List of Abbreviations')
sr = []
for l in los.split('\n'):
if l and not l[0] == ';':
if ';' in l:
l, *comment = l.split(';')
l = l.strip()
print(l, comment)
#asdf = l.rsplit(' ', 1)
#print(asdf)
struct, abbrev = l.rsplit(' ', 1)
sr.append((abbrev, struct))
ar = []
for l in loa.split('\n'):
if l and not l[0] == ';':
if ';' in l:
l, *comment = l.split(';')
l = l.strip()
print(l, comment)
#asdf = l.rsplit(' ', 1)
#print(asdf)
abbrev, rest = l.split(' ', 1)
parts = rest.split(' ')
#print(parts)
for i, pr in enumerate(parts[::-1]):
#print(i, pr)
z = pr[0].isdigit()
if not z or i > 0 and z and pr[-1] != ',':
break
struct = ' '.join(parts[:-i])
figs = tuple(tuple(int(_) for _ in p.split('-'))
if '-' in p
else (tuple(f'{nl[:-1]}{l}'
for nl, *ls in p.split(',')
for l in (nl[-1], *ls))
if ',' in p or p[-1].isalpha()
else int(p))
for p in (_.rstrip(',') for _ in parts[-i:]))
figs = tuple(f for f in figs if f) # zero marks abbrevs in index that are not in figures
#print(struct)
ar.append((abbrev, struct, figs))
return sr, ar | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def processData(cls):
sr, ar = cls.parseData()
out = {}
achild = {}
for a, s, f in ar:
if ', layer 1' in s or s.endswith(' layer 1'): # DTT1 ends in ' layer 1' without a comma
achild[a[:-1]] = a
continue # remove the precomposed, we will deal with them systematically
if a not in out:
out[a] = ([s], f)
else:
if s not in out[a][0]:
print(f'Found new label from ar for {a}:\n{s}\n{out[a][0]}')
out[a][0].append(s)
schild = {}
for a, s in sr:
if ', layer 1' in s or s.endswith(' layer 1'):
schild[a[:-1]] = a
continue # remove the precomposed, we will deal with them systematically
if a not in out:
out[a] = ([s], tuple())
else:
if s not in out[a][0]:
print(f'Found new label from sr for {a}:\n{s}\n{out[a][0]}')
out[a][0].append(s)
#raise TypeError(f'Mismatched labels on {a}: {s} {out[a][0]}')
return sr, ar, out, achild, schild | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def validate(cls, sr, ar, out, achild, schild):
def missing(a, b):
am = a - b
bm = b - a
return am, bm
sabs = set(_[0] for _ in sr)
aabs = set(_[0] for _ in ar)
ssts = set(_[1] for _ in sr)
asts = set(_[1] for _ in ar)
ar2 = set(_[:2] for _ in ar)
aam, sam = missing(aabs, sabs)
asm, ssm = missing(asts, ssts)
ar2m, sr2m = missing(ar2, set(sr))
print('OK to skip')
print(sorted(aam))
print('Need to be created')
print(sorted(sam))
print()
print(sorted(asm))
print()
print(sorted(ssm))
print()
#print(sorted(ar2m))
#print()
#print(sorted(sr2m))
#print()
assert all(s in achild for s in schild), f'somehow the kids dont match {achild} {schild}\n' + str(sorted(set(a) - set(s) | set(s) - set(a)
for a, s in ((tuple(sorted(achild.items())),
tuple(sorted(schild.items()))),)))
for k, (structs, figs) in out.items():
for struct in structs:
assert not re.match('\d+-\d+', struct) and not re.match('\d+$', struct), f'bad struct {struct} in {k}'
errata = {'nodes with layers':achild}
return out, errata | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def loadData(cls):
with open(os.path.expanduser(cls.source), 'rt') as f:
return [l for l in f.read().split('\n') if l] | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def processData(cls):
out = {}
recs = []
parent_stack = [None]
old_depth = 0
layers = {}
for l in cls.raw:
depth, abbrev, _, name = l.split(' ', 3)
depth = len(depth)
if old_depth < depth: # don't change
parent = parent_stack[-1]
parent_stack.append(abbrev)
old_depth = depth
elif old_depth == depth:
if len(parent_stack) - 1 > depth:
parent_stack.pop()
parent = parent_stack[-1]
parent_stack.append(abbrev)
elif old_depth > depth: # bump back
for _ in range(old_depth - depth + 1):
parent_stack.pop()
parent = parent_stack[-1]
parent_stack.append(abbrev)
old_depth = depth
struct = None if name == '-------' else name
o = (depth, abbrev, struct, parent)
if '-' in abbrev:
# remove the precomposed, we will deal with them systematically
maybe_parent, rest = abbrev.split('-', 1)
if rest.isdigit() or rest == '1a' or rest == '1b': # Pir1a Pir1b
if parent == 'Unknown': # XXX special cases
if maybe_parent == 'Pi': # i think this was probably caused by an ocr error from Pir3 -> Pi3
continue
assert maybe_parent == parent, f'you fall into a trap {maybe_parent} {parent}'
if parent not in layers:
layers[parent] = []
layers[parent].append((layer, o)) # FIXME where does layer come from here?
# I think this comes from the previous iteration of the loop?!
elif struct is not None and ', layer 1' in struct:
# remove the precomposed, we will deal with them systematically
parent_, layer = abbrev[:-1], abbrev[-1]
if parent_ == 'CxA' and parent == 'Amy': # XXX special cases
parent = 'CxA'
elif parent == 'Unknown':
if parent_ == 'LOT':
parent = 'LOT'
elif parent_ == 'Tu':
parent = 'Tu'
assert parent_ == parent, f'wrong turn friend {parent_} {parent}'
if parent not in layers:
layers[parent] = []
layers[parent].append((layer, o))
else:
recs.append(o)
out[abbrev] = ([struct], (), parent)
errata = {'nodes with layers':layers}
return recs, out, errata | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def validate(cls, trecs, tr, errata):
print(Counter(_[1] for _ in trecs).most_common()[:5])
('CxA1', 2), ('Tu1', 2), ('LOT1', 2), ('ECIC3', 2)
assert len(tr) == len(trecs), 'Abbreviations in tr are not unique!'
return tr, errata | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def fixes_abbrevs(self):
fixes_abbrevs = set()
for f in self._fixes:
fixes_abbrevs.add(f[0])
for dupe in self._dupes.values():
fixes_abbrevs.add(dupe.alt_abbrevs[0])
return fixes_abbrevs | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def fixes_prov(self):
_fixes_prov = {}
for f in self._fixes:
for l in f[1][0]:
_fixes_prov[l] = [ParcOnt.wasGeneratedBy.format(line=getSourceLine(self.__class__))] # FIXME per file
return _fixes_prov | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def dupes_structs(self):
ds = {'cerebellar lobules', 'cerebellar lobule'}
for dupe in self._dupes.values():
for struct in dupe.structures:
ds.add(struct)
return ds | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def fixes(self):
_, _, collisions, _ = self.records()
for a, (ss, f, arts) in self._fixes:
if (a, ss[0]) in collisions:
f.update(collisions[a, ss[1]]) # have to use 1 since we want "layer n" as the pref
yield a, ([], ss, f, arts) | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def _makeIriLookup(self):
# FIXME need to validate that we didn't write the graph first...
g = Graph().parse(self._graph.filename, format='turtle')
ids = [s for s in g.subjects(rdf.type, owl.Class) if self.namespace in s]
index0 = Label.propertyMapping['abbrevs'],
index1 = Label.propertyMapping['label'], Label.propertyMapping['synonyms']
out = {}
for i in ids:
for p0 in index0:
for o0 in g.objects(i, p0):
for p1 in index1:
for o1 in g.objects(i, p1):
key = o0, o1
value = i
if key in out:
raise KeyError(f'Key {key} already in output!')
out[key] = value
return out | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def validate(self):
# check for duplicate labels
labels = list(self.graph.objects(None, rdfs.label))
assert len(labels) == len(set(labels)), f'There are classes with duplicate labels! {Counter(labels).most_common()[:5]}'
# check for unexpected duplicate abbreviations
abrevs = list(self.graph.objects(None, NIFRID.abbrev))
# remove expected numeric/layer/lobule duplicates
filt = [a for a in abrevs if not a.isdigit() and a.value not in ('6a', '6b')]
assert len(filt) == len(set(filt)), f'DUPES! {Counter(filt).most_common()[:5]}'
# check for abbreviations without corresponding structure ie 'zzzzzz'
syns = list(self.graph.objects(None, NIFRID.synonym))
for thing in labels + syns:
trips = [(s, o) for s in self.graph.subjects(None, thing) for p, o in self.graph.predicate_objects(s)]
assert 'zzzzzz' not in thing, f'{trips} has bad label/syn suggesting a problem with the source file'
return self | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def do_struct_prov(structure, source=None, artiri=None):
if artiri is None:
artiri = source.artifact.iri
if structure not in struct_prov:
struct_prov[structure] = [artiri]
elif artiri not in struct_prov[structure]:
struct_prov[structure].append(artiri) | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def curate(self):
fr, err4 = PaxSrAr_4()
sx, err6 = PaxSrAr_6()
sx2, _ = PaxSr_6()
tr, err6t = PaxTree_6()
sfr = set(fr)
ssx = set(sx)
ssx2 = set(sx2)
str_ = set(tr)
in_four_not_in_six = sfr - ssx
in_six_not_in_four = ssx - sfr
in_tree_not_in_six = str_ - ssx
in_six_not_in_tree = ssx - str_
in_six2_not_in_six = ssx2 - ssx
in_six_not_in_six2 = ssx - ssx2
print(len(in_four_not_in_six), len(in_six_not_in_four),
len(in_tree_not_in_six), len(in_six_not_in_tree),
len(in_six2_not_in_six), len(in_six_not_in_six2),
)
tr_struct_abrv = {}
for abrv, ((struct, *extra), _, parent) in tr.items():
tr_struct_abrv[struct] = abrv
if abrv in sx:
#print(abrv, struct, parent)
if struct and struct not in sx[abrv][0]:
print(f'Found new label from tr for {abrv}:\n{struct}\n{sx[abrv][0]}\n')
# can't run these for tr yet
#reduced = set(tr_struct_abrv.values())
#print(sorted(_ for _ in tr if _ not in reduced))
#assert len(tr_struct_abrv) == len(tr), 'mapping between abrvs and structs is not 1:1 for tr'
sx2_struct_abrv = {}
for abrv, ((struct, *extra), _) in sx2.items():
sx2_struct_abrv[struct] = abrv
if abrv in sx:
if struct and struct not in sx[abrv][0]:
print(f'Found new label from sx2 for {abrv}:\n{struct}\n{sx[abrv][0]}\n')
reduced = set(sx2_struct_abrv.values())
print(sorted(_ for _ in reduced if _ not in sx2)) # ah inconsistent scoping rules in class defs...
assert len(sx2_struct_abrv) == len(sx2), 'there is a duplicate struct'
sx_struct_abrv = {}
for abrv, ((struct, *extra), _) in sx.items():
sx_struct_abrv[struct] = abrv
reduced = set(sx_struct_abrv.values())
print(sorted(_ for _ in reduced if _ not in sx))
assert len(sx_struct_abrv) == len(sx), 'there is a duplicate struct'
# TODO test whether any of the tree members that were are going to exclude have children that we are going to include
names_match_not_abbervs = {}
tree_no_name = {_:tr[_] for _ in sorted(in_tree_not_in_six) if not tr[_][0][0]}
tree_with_name = {_:tr[_] for _ in sorted(in_tree_not_in_six) if tr[_][0][0]}
not_in_tree_with_figures = {_:sx[_] for _ in sorted(in_six_not_in_tree) if sx[_][-1]}
a = f'{"abv":<25} | {"structure name":<60} | parent abv\n' + '\n'.join(f'{k:<25} | {v[0][0]:<60} | {v[-1]}' for k, v in tree_with_name.items())
b = f'{"abv":<25} | {"structure name":<15} | parent abv\n' + '\n'.join(f'{k:<25} | {"":<15} | {v[-1]}' for k, v in tree_no_name.items())
c = f'abv | {"structure name":<60} | figures (figure ranges are tuples)\n' + '\n'.join(f'{k:<6} | {v[0][0]:<60} | {v[-1]}' for k, v in not_in_tree_with_figures.items())
with open(os.path.expanduser('~/ni/dev/nifstd/paxinos/tree-with-name.txt'), 'wt') as f: f.write(a)
with open(os.path.expanduser('~/ni/dev/nifstd/paxinos/tree-no-name.txt'), 'wt') as f: f.write(b)
with open(os.path.expanduser('~/ni/dev/nifstd/paxinos/not-in-tree-with-figures.txt'), 'wt') as f: f.write(c)
#match_name_not_abrev = set(v[0][0] for v in tree_with_name.values()) & set(v[0][0] for v in sx.values())
_match_name_not_abrev = {}
for a, (alts, (s, *extra), f, *_) in PaxRatLabels().records()[0].items():
if s not in _match_name_not_abrev:
_match_name_not_abrev[s] = [a]
elif a not in _match_name_not_abrev[s]:
_match_name_not_abrev[s].append(a)
match_name_not_abrev = {k:v for k, v in _match_name_not_abrev.items() if len(v) > 1}
abrv_match_not_name = {k:v[0] for k, v in PaxRatLabels().records()[0].items() if len(v[0]) > 1}
_ = [print(k, *v[0]) for k, v in PaxRatLabels().records()[0].items() if len(v[0]) > 1]
breakpoint()
#self.in_tree_not_in_six = in_tree_not_in_six # need for skipping things that were not actually named by paxinos | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def __init__(self, source, abbreviation, structure, artifacts,
figures=tuple(),
synonyms=tuple(),
altAbbrevs=tuple()):
self.source = source
self.abbreviation = abbreviation
self.structure = structure
self.artifacts = artifacts | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def __hash__(self):
return hash(self.abbreviation) | tgbugs/pyontutils | [
15,
123,
15,
42,
1455242132
] |
def define(word, word_eol, userdata):
if len(word) >= 2:
_word = xchat.strip(word[1])
_number = 1
if len(word) >= 3:
_number = int(xchat.strip(word[2]))
else:
xchat.prnt('Define Usage: /define word [number]')
xchat.prnt(' number being alternate definition')
return xchat.EAT_ALL
url="http://www.google.com/dictionary/json?callback=s&q=" + _word + "&sl=en&tl=en&restrict=pr,de&client=te"
obj=urllib.urlopen(url);
content=obj.read()
obj.close()
content=content[2:-10]
dic=ast.literal_eval(content)
if dic.has_key("webDefinitions"):
webdef=dic["webDefinitions"]
webdef=webdef[0]
webdef=webdef["entries"]
index=1
for i in webdef:
if index == _number:
if i["type"]=="meaning":
ans=i["terms"]
op=ans[0]['text']
split=op.split(';')
xchat.prnt(_word + ': ' + split[0].strip())
index+=1
return xchat.EAT_ALL
else:
xchat.prnt('Description unavailable for ' + _word)
return xchat.EAT_ALL | TingPing/plugins | [
101,
42,
101,
2,
1335040714
] |
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def set_buffer(ctx, buf):
global _keepalive # See note in JsonRpcProtocol
_keepalive = ctx.buf = _ffi.from_buffer(buf)
ctx.buflen = len(buf)
ctx.offset = 0 | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_simple(self):
r = b'{ "foo": "bar" }'
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, len(r)) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_trailing_whitespace(self):
r = b'{ "foo": "bar" } '
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, len(r)-1)
error = _lib.json_split(ctx)
self.assertEqual(error, ctx.error) == _lib.INCOMPLETE
self.assertEqual(ctx.offset, len(r)) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_string_escape(self):
r = b'{ "foo": "b\\"}" }'
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, len(r)) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_multiple(self):
r = b'{ "foo": "bar" } { "baz": "qux" }'
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, 16)
error = _lib.json_split(ctx)
self.assertEqual(error, ctx.error) == 0
self.assertEqual(ctx.offset, len(r)) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def setUp(self):
super(TestJsonRpcV1, self).setUp()
self.version = JsonRpcVersion.create('1.0') | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_request_missing_id(self):
v = self.version
msg = {'method': 'foo', 'params': []}
self.assertRaises(ValueError, v.check_message, msg) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_request_illegal_method(self):
v = self.version
msg = {'id': 1, 'method': None, 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': 1, 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': {}, 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': [], 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': [1], 'params': []}
self.assertRaises(ValueError, v.check_message, msg) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_request_illegal_params(self):
v = self.version
msg = {'id': 1, 'method': 'foo', 'params': None}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': 'foo', 'params': 1}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': 'foo', 'params': 'foo'}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': 'foo', 'params': {}} | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_response(self):
v = self.version
msg = {'id': 1, 'result': 'foo', 'error': None}
self.assertEqual(v.check_message(msg), jsonrpc.RESPONSE) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_response_error(self):
v = self.version
msg = {'id': 1, 'result': None, 'error': {'code': 1}}
self.assertEqual(v.check_message(msg), jsonrpc.RESPONSE) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_response_missing_result(self):
v = self.version
msg = {'id': 1, 'error': None}
self.assertRaises(ValueError, v.check_message, msg) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_response_illegal_error(self):
v = self.version
msg = {'id': 1, 'result': None, 'error': 1}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'result': None, 'error': 'foo'}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'result': None, 'error': []}
self.assertRaises(ValueError, v.check_message, msg) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_response_extraneous_fields(self):
v = self.version
msg = {'id': 1, 'result': 1, 'error': None, 'bar': 'baz'}
self.assertRaises(ValueError, v.check_message, msg) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_create_request_notification(self):
v = self.version
msg = v.create_request('foo', [], notification=True)
self.assertIsNone(msg['id'])
self.assertEqual(msg['method'], 'foo')
self.assertEqual(msg['params'], [])
self.assertEqual(len(msg), 3) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_create_response_null_result(self):
v = self.version
req = {'id': 'gruvi.0'}
msg = v.create_response(req, None)
self.assertEqual(msg['id'], req['id'])
self.assertIsNone(msg['result'])
self.assertIsNone(msg['error'])
self.assertEqual(len(msg), 3) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def setUp(self):
super(TestJsonRpcV2, self).setUp()
self.version = JsonRpcVersion.create('2.0') | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_request_notification(self):
v = self.version
msg = {'jsonrpc': '2.0', 'method': 'foo', 'params': []}
self.assertEqual(v.check_message(msg), jsonrpc.REQUEST)
msg = {'jsonrpc': '2.0', 'method': 'foo', 'params': {}}
self.assertEqual(v.check_message(msg), jsonrpc.REQUEST) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_request_missing_method(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'params': []}
self.assertRaises(ValueError, v.check_message, msg) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_request_missing_params(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'method': 'foo'}
self.assertEqual(v.check_message(msg), jsonrpc.REQUEST) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_request_extraneous_fields(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'method': 'foo', 'params': [], 'bar': 'baz'}
self.assertRaises(ValueError, v.check_message, msg) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_response_null_result(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'result': None}
self.assertEqual(v.check_message(msg), jsonrpc.RESPONSE) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_response_missing_id(self):
v = self.version
msg = {'jsonrpc': '2.0', 'result': 'foo'}
self.assertRaises(ValueError, v.check_message, msg) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_response_error_missing_id(self):
v = self.version
msg = {'jsonrpc': '2.0', 'error': {'code': 10}}
self.assertRaises(ValueError, v.check_message, msg) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_response_missing_result_and_error(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1}
self.assertRaises(ValueError, v.check_message, msg) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_check_response_result_error_both_present(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'result': None, 'error': None}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'jsonrpc': '2.0', 'id': 1, 'result': 1, 'error': None}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'jsonrpc': '2.0', 'id': 1, 'result': None, 'error': {'code': 10}}
self.assertRaises(ValueError, v.check_message, msg) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_create_request(self):
v = self.version
msg = v.create_request('foo', [])
self.assertEqual(msg['jsonrpc'], '2.0')
self.assertIsInstance(msg['id'], six.string_types)
self.assertEqual(msg['method'], 'foo')
self.assertEqual(msg['params'], [])
self.assertEqual(len(msg), 4) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_create_response(self):
v = self.version
req = {'id': 'gruvi.0'}
msg = v.create_response(req, 1)
self.assertEqual(msg['jsonrpc'], '2.0')
self.assertEqual(msg['id'], req['id'])
self.assertEqual(msg['result'], 1)
self.assertNotIn('error', msg)
self.assertEqual(len(msg), 3) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_create_response_error(self):
v = self.version
req = {'id': 'gruvi.0'}
msg = v.create_response(req, error={'code': 1})
self.assertEqual(msg['jsonrpc'], '2.0')
self.assertEqual(msg['id'], req['id'])
self.assertNotIn('result', msg)
self.assertEqual(msg['error'], {'code': 1})
self.assertEqual(len(msg), 3) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def setUp(self):
super(TestJsonRpcProtocol, self).setUp()
self.transport = MockTransport()
self.protocol = JsonRpcProtocol(self.message_handler)
self.transport.start(self.protocol)
self.messages = []
self.protocols = [] | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def get_messages(self):
# run dispatcher thread so that it calls our message handler
gruvi.sleep(0)
return self.messages | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_multiple(self):
m = b'{ "id": "1", "method": "foo", "params": [] }' \
b'{ "id": "2", "method": "bar", "params": [] }'
proto = self.protocol
proto.data_received(m)
mm = self.get_messages()
self.assertEqual(len(mm), 2)
self.assertEqual(mm[0], {'id': '1', 'method': 'foo', 'params': []})
self.assertEqual(mm[1], {'id': '2', 'method': 'bar', 'params': []})
pp = self.protocols
self.assertEqual(len(pp), 2)
self.assertIs(pp[0], proto)
self.assertIs(pp[1], proto) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_incremental(self):
m = b'{ "id": "1", "method": "foo", "params": [] }'
proto = self.protocol
for i in range(len(m)-1):
proto.data_received(m[i:i+1])
self.assertEqual(self.get_messages(), [])
proto.data_received(m[-1:])
mm = self.get_messages()
self.assertEqual(len(mm), 1)
self.assertEqual(mm[0], {'id': '1', 'method': 'foo', "params": []}) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_encoding_error(self):
m = b'{ xxx\xff }'
proto = self.protocol
proto.data_received(m)
self.assertEqual(self.get_messages(), [])
self.assertIsInstance(proto._error, JsonRpcError) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_illegal_jsonrpc(self):
m = b'{ "xxxx": "yyyy" }'
proto = self.protocol
proto.data_received(m)
self.assertEqual(self.get_messages(), [])
self.assertIsInstance(proto._error, JsonRpcError) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_flow_control(self):
# Write more messages than the protocol is willing to pipeline. Flow
# control should kick in and alternate scheduling of the producer and
# the consumer.
proto, trans = self.protocol, self.transport
self.assertTrue(trans._reading)
proto.max_pipeline_size = 10
message = b'{ "id": 1, "method": "foo", "params": [] }'
interrupted = 0
for i in range(1000):
proto.data_received(message)
if not trans._reading:
interrupted += 1
gruvi.sleep(0) # run dispatcher
self.assertTrue(trans._reading)
mm = self.get_messages()
self.assertEqual(len(mm), 1000)
self.assertEqual(interrupted, 100)
message = json.loads(message.decode('utf8'))
for m in mm:
self.assertEqual(m, message) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def reflect_app(message, transport, protocol):
if message.get('method') != 'echo':
return
value = protocol.call_method('echo', *message['params'])
protocol.send_response(message, value) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def application(message, transport, protocol):
if message.get('id') is None:
notifications.append((message['method'], message['params']))
elif message['method'] == 'get_notifications':
protocol.send_response(message, notifications) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_errno(self):
code = jsonrpc.SERVER_ERROR
self.assertIsInstance(code, int)
name = jsonrpc.errorcode[code]
self.assertIsInstance(name, str)
self.assertEqual(getattr(jsonrpc, name), code)
desc = jsonrpc.strerror(code)
self.assertIsInstance(desc, str) | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_call_method_pipe(self):
server = JsonRpcServer(echo_app)
server.listen(self.pipename(abstract=True))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
result = client.call_method('echo', 'foo')
self.assertEqual(result, ['foo'])
server.close()
client.close() | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_call_method_no_args(self):
server = JsonRpcServer(echo_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
result = client.call_method('echo')
self.assertEqual(result, [])
server.close()
client.close() | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_call_method_error(self):
server = JsonRpcServer(echo_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
exc = self.assertRaises(JsonRpcError, client.call_method, 'echo2')
self.assertIsInstance(exc, JsonRpcError)
self.assertIsInstance(exc.error, dict)
self.assertEqual(exc.error['code'], jsonrpc.METHOD_NOT_FOUND)
server.close()
client.close() | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_call_method_ping_pong(self):
server = JsonRpcServer(reflect_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient(echo_app)
client.connect(addr)
result = client.call_method('echo', 'foo')
self.assertEqual(result, ['foo'])
server.close()
client.close() | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_send_whitespace(self):
server = JsonRpcServer(echo_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
exc = None
try:
chunk = b' ' * 1024
while True:
client.transport.write(chunk)
gruvi.sleep(0)
except Exception as e:
exc = e
self.assertIsInstance(exc, TransportError)
server.close()
client.close() | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def test_connection_limit(self):
server = JsonRpcServer(echo_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
server.max_connections = 2
clients = []
exc = None
try:
for i in range(3):
client = JsonRpcClient(timeout=2)
client.connect(addr)
client.call_method('echo')
clients.append(client)
except Exception as e:
exc = e
self.assertIsInstance(exc, TransportError)
self.assertEqual(len(server.connections), server.max_connections)
for client in clients:
client.close()
server.close() | geertj/gruvi | [
95,
11,
95,
2,
1355003397
] |
def time_zone(t):
if t.tm_isdst == 1 and time.daylight == 1:
tz_sec = time.altzone
tz_name = time.tzname[1]
else:
tz_sec = time.timezone
tz_name = time.tzname[0]
if tz_sec > 0:
tz_sign = '-'
else:
tz_sign = '+'
tz_offset = '%s%02d%02d' % (tz_sign, abs(tz_sec)//3600, abs(tz_sec//60)%60)
return (tz_offset, tz_name) | jsubpy/jsub | [
2,
2,
2,
1,
1416218010
] |
def formatTime(self, record, datefmt=None):
ct = time.localtime(record.created)
if datefmt:
s = time.strftime(datefmt, ct)
else:
t = time.strftime('%Y-%m-%d %H:%M:%S', ct)
ms = '%03d' % record.msecs
tz_offset, tz_name = time_zone(ct)
s = '%s.%03d %s %s' % (t, record.msecs, tz_offset, tz_name)
return s | jsubpy/jsub | [
2,
2,
2,
1,
1416218010
] |
def __init__(self, client, config, serializer, deserializer) -> None:
    """Store the pipeline client, configuration and (de)serializers used
    by this operations class."""
    self._config = config
    self._client = client
    self._deserialize = deserializer
    self._serialize = serializer
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # Route the raw pipeline response through the caller-supplied
    # callback when one was provided; otherwise there is nothing to return.
    return cls(pipeline_response, None, {}) if cls else None
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    # Deserialize the final response body into the typed model, then hand
    # it to the caller-supplied callback when one was provided.
    result = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
    if cls:
        return cls(pipeline_response, result, {})
    return result
3526,
2256,
3526,
986,
1335285972
] |
def list(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    # Build the GET request for the first page (from the operation's
    # templated URL) or for a follow-up page (from next_link verbatim).
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    query_parameters = {}  # type: Dict[str, Any]
    if not next_link:
        url = self.list.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
    else:
        # Continuation links already embed their query string.
        url = next_link
    return self._client.get(url, query_parameters, header_parameters)
3526,
2256,
3526,
986,
1335285972
] |
def setUp(self):
    """Build one instance of each library exception for the assertions
    in this test case."""
    # Filter errors carry a valid-filter hint that varies by endpoint.
    self.afilter = InvalidFilterError('fake', 'artifacts')
    self.filter = InvalidFilterError('fake', 'status')
    self.verb = BadVerbError('fake')
    self.key = BadKeyError('fake')
    self.base = CircleCIException('fake')
47,
28,
47,
9,
1508720288
] |
def test_verb_message(self):
    # The BadVerbError message is expected to mention DELETE (presumably
    # as one of the accepted verbs — see the exception's definition).
    self.assertIn('DELETE', self.verb.message)
47,
28,
47,
9,
1508720288
] |
def __init__(self, *args, **kwargs):
    """Create the child widgets up front, then let the Qt base class
    finish initialization."""
    self.prebuild_btn = QtWidgets.QPushButton("Prebuild")
    self.refresh_btn = QtWidgets.QPushButton("Refresh")
    self.outputs_cbbox = QtWidgets.QComboBox()
    self.modules_cbbox = QtWidgets.QComboBox()
    super(View, self).__init__(*args, **kwargs)
10,
3,
10,
10,
1502370963
] |
def set_model(self):
    # Attach a fresh Model instance to this view.
    self.model = Model()
10,
3,
10,
10,
1502370963
] |
def setup_ui(self):
    """Wire the widgets to the controller and assemble the layout."""
    # Combo boxes are backed by controller-owned models; text changes
    # are forwarded back to the controller.
    self.modules_cbbox.setModel(self.ctrl.modules_with_output)
    self.modules_cbbox.currentTextChanged.connect(self.ctrl.on_modules_cbbox_changed)
    self.outputs_cbbox.setModel(self.ctrl.outputs_model)
    self.outputs_cbbox.currentTextChanged.connect(self.ctrl.on_outputs_cbbox_changed)
    self.refresh_btn.clicked.connect(self.ctrl.look_for_parent)
    self.prebuild_btn.clicked.connect(self.ctrl.prebuild)
    # Layout: a "Select parent" group (module/output combos side by side,
    # refresh button below) stacked above the prebuild button.
    main_layout = QtWidgets.QVBoxLayout()
    select_parent_layout = QtWidgets.QVBoxLayout()
    select_parent_grp = grpbox("Select parent", select_parent_layout)
    cbbox_layout = QtWidgets.QHBoxLayout()
    cbbox_layout.addWidget(self.modules_cbbox)
    cbbox_layout.addWidget(self.outputs_cbbox)
    select_parent_layout.addLayout(cbbox_layout)
    select_parent_layout.addWidget(self.refresh_btn)
    main_layout.addWidget(select_parent_grp)
    main_layout.addWidget(self.prebuild_btn)
    self.setLayout(main_layout)
10,
3,
10,
10,
1502370963
] |
def __init__(self, model, view):
    """Initialize the rig controller with its model/view pair.

    Args:
        model (Model): data model backing this module.
        view (View): Qt view driving this controller.
    """
    # Guide-related state; populated later (presumably during prebuild).
    self.guides_grp = None
    self.guide = None
    self.guide_name = "None"
    # Base-class init last, once this subclass's attributes exist.
    RigController.__init__(self, model, view)
10,
3,
10,
10,
1502370963
] |
def execute(self):
    """Build the COG module: one controller with an output node plus a
    hidden, locked info curve carrying the build metadata."""
    self.prebuild()
    # Rebuild from scratch: drop any previous build, then re-attach this
    # module to the selected parent output.
    self.delete_existing_objects()
    self.connect_to_parent()
    # Create the COG control from a large box curve and move its offset
    # group to the guide's world-space position.
    cog_shape = rig_lib.large_box_curve("{0}_CTRL_shape".format(self.model.module_name))
    cog_ctrl = rig_lib.create_jnttype_ctrl(name="{0}_CTRL".format(self.model.module_name), shape=cog_shape,
                                           drawstyle=2)
    cog_ofs = pmc.group(cog_ctrl, n="{0}_ctrl_OFS".format(self.model.module_name))
    cog_ofs.setAttr("translate", pmc.xform(self.guide, q=1, ws=1, translation=1))
    pmc.parent(cog_ofs, self.ctrl_input_grp)
    rig_lib.create_output(name="{0}_OUTPUT".format(self.model.module_name), parent=cog_ctrl)
    # trs="s": lock scale on the control; 20 is presumably a display/color
    # index — confirm against rig_lib.clean_ctrl.
    rig_lib.clean_ctrl(cog_ctrl, 20, trs="s")
    # Hide the rig internals from animators.
    self.jnt_input_grp.setAttr("visibility", 0)
    self.parts_grp.setAttr("visibility", 0)
    self.guides_grp.setAttr("visibility", 0)
    # Info curve: invisible, outliner-hidden, fully locked node whose only
    # job is to carry module metadata as extra attributes.
    info_crv = rig_lib.signature_shape_curve("{0}_INFO".format(self.model.module_name))
    info_crv.getShape().setAttr("visibility", 0)
    info_crv.setAttr("hiddenInOutliner", 1)
    info_crv.setAttr("translateX", lock=True, keyable=False, channelBox=False)
    info_crv.setAttr("translateY", lock=True, keyable=False, channelBox=False)
    info_crv.setAttr("translateZ", lock=True, keyable=False, channelBox=False)
    info_crv.setAttr("rotateX", lock=True, keyable=False, channelBox=False)
    info_crv.setAttr("rotateY", lock=True, keyable=False, channelBox=False)
    info_crv.setAttr("rotateZ", lock=True, keyable=False, channelBox=False)
    info_crv.setAttr("scaleX", lock=True, keyable=False, channelBox=False)
    info_crv.setAttr("scaleY", lock=True, keyable=False, channelBox=False)
    info_crv.setAttr("scaleZ", lock=True, keyable=False, channelBox=False)
    info_crv.setAttr("visibility", lock=True, keyable=False, channelBox=False)
    # Display override 2 = referenced/non-selectable in Maya viewports.
    info_crv.setAttr("overrideEnabled", 1)
    info_crv.setAttr("overrideDisplayType", 2)
    pmc.parent(info_crv, self.parts_grp)
    # Persist build parameters so the module can be reconstructed later.
    rig_lib.add_parameter_as_extra_attr(info_crv, "Module", "cog")
    rig_lib.add_parameter_as_extra_attr(info_crv, "parent_Module", self.model.selected_module)
    rig_lib.add_parameter_as_extra_attr(info_crv, "parent_output", self.model.selected_output)
    pmc.select(cl=1)
10,
3,
10,
10,
1502370963
] |
def setUp(self):
    """
    Create a fresh ListHostsHandler instance for each test.
    """
    self.instance = ListHostsHandler()
14,
5,
14,
4,
1383311815
] |
def start_response(code, headers):
    # WSGI-style start_response stub: capture the status line and headers
    # in the enclosing test buffer instead of sending them anywhere.
    buffer['headers'] = headers
    buffer['code'] = code
14,
5,
14,
4,
1383311815
] |
def build_list_request(
subscription_id: str,
location: str,
*,
shared_to: Optional[Union[str, "_models.SharedToValues"]] = None,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def build_get_request(
subscription_id: str,
location: str,
gallery_unique_name: str,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(self, client, config, serializer, deserializer):
    """Keep references to the pipeline client, configuration and the
    (de)serializers used by this operations class."""
    self._config = config
    self._client = client
    self._deserialize = deserializer
    self._serialize = serializer
3526,
2256,
3526,
986,
1335285972
] |
def list(
self,
location: str,
shared_to: Optional[Union[str, "_models.SharedToValues"]] = None,
**kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
if not next_link: | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def extract_data(pipeline_response):
    # Deserialize one page and hand back (continuation token, item iterator).
    page = self._deserialize("SharedGalleryList", pipeline_response)
    elements = cls(page.value) if cls else page.value
    return page.next_link or None, iter(elements)
3526,
2256,
3526,
986,
1335285972
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.