body_hash
stringlengths 64
64
| body
stringlengths 23
109k
| docstring
stringlengths 1
57k
| path
stringlengths 4
198
| name
stringlengths 1
115
| repository_name
stringlengths 7
111
| repository_stars
float64 0
191k
| lang
stringclasses 1
value | body_without_docstring
stringlengths 14
108k
| unified
stringlengths 45
133k
|
|---|---|---|---|---|---|---|---|---|---|
9add8fec4b67c4a9ae2ffc1e3c488fdcb8a6aa4dd23d033864c485e911c4889f
|
def get_emoji_img(self):
'\n 打开emoji图片\n Open emoji image\n '
length_list = INITIAL_UNICODE[self.char]
emoji_unicode = None
for length in length_list:
emoji_unicode_temp = self.paragraph[self.char_index:(self.char_index + length)]
if (emoji_unicode_temp in UNICODE_TO_PATH):
emoji_unicode = emoji_unicode_temp
self.char_next = (self.char_index + length)
break
if (emoji_unicode is None):
self.char_next = NEGATIVE
return None
emoji_file_name = UNICODE_TO_PATH.get(emoji_unicode)
if (emoji_file_name is None):
self.char_next = NEGATIVE
return None
emoji_img = Image.open(os.path.join(self.emoji_folder, emoji_file_name))
return emoji_img
|
打开emoji图片
Open emoji image
|
emoji2pic/main.py
|
get_emoji_img
|
sniper-py/emoji2pic-python
| 7
|
python
|
def get_emoji_img(self):
'\n 打开emoji图片\n Open emoji image\n '
length_list = INITIAL_UNICODE[self.char]
emoji_unicode = None
for length in length_list:
emoji_unicode_temp = self.paragraph[self.char_index:(self.char_index + length)]
if (emoji_unicode_temp in UNICODE_TO_PATH):
emoji_unicode = emoji_unicode_temp
self.char_next = (self.char_index + length)
break
if (emoji_unicode is None):
self.char_next = NEGATIVE
return None
emoji_file_name = UNICODE_TO_PATH.get(emoji_unicode)
if (emoji_file_name is None):
self.char_next = NEGATIVE
return None
emoji_img = Image.open(os.path.join(self.emoji_folder, emoji_file_name))
return emoji_img
|
def get_emoji_img(self):
'\n 打开emoji图片\n Open emoji image\n '
length_list = INITIAL_UNICODE[self.char]
emoji_unicode = None
for length in length_list:
emoji_unicode_temp = self.paragraph[self.char_index:(self.char_index + length)]
if (emoji_unicode_temp in UNICODE_TO_PATH):
emoji_unicode = emoji_unicode_temp
self.char_next = (self.char_index + length)
break
if (emoji_unicode is None):
self.char_next = NEGATIVE
return None
emoji_file_name = UNICODE_TO_PATH.get(emoji_unicode)
if (emoji_file_name is None):
self.char_next = NEGATIVE
return None
emoji_img = Image.open(os.path.join(self.emoji_folder, emoji_file_name))
return emoji_img<|docstring|>打开emoji图片
Open emoji image<|endoftext|>
|
aa8ef833b80b9c0dc0570838a58bd85c904c9135f32adf33df2b1151c0185079
|
def draw_emoji(self):
'\n 绘制emoji\n Draw emoji\n '
emoji_img = self.get_emoji_img()
if (emoji_img is None):
self.x -= self.font_size
return
if (self.font_size != EMOJI_IMG_SIZE):
emoji_img = emoji_img.resize((self.font_size, self.font_size), Image.ANTIALIAS)
if (emoji_img.mode == 'RGBA'):
(r, g, b, a) = emoji_img.split()
elif (emoji_img.mode == 'LA'):
(l, a) = emoji_img.split()
else:
emoji_img = emoji_img.convert('RGBA')
(r, g, b, a) = emoji_img.split()
self.img.paste(emoji_img, (self.x, (self.y + self.emoji_offset)), mask=a)
return
|
绘制emoji
Draw emoji
|
emoji2pic/main.py
|
draw_emoji
|
sniper-py/emoji2pic-python
| 7
|
python
|
def draw_emoji(self):
'\n 绘制emoji\n Draw emoji\n '
emoji_img = self.get_emoji_img()
if (emoji_img is None):
self.x -= self.font_size
return
if (self.font_size != EMOJI_IMG_SIZE):
emoji_img = emoji_img.resize((self.font_size, self.font_size), Image.ANTIALIAS)
if (emoji_img.mode == 'RGBA'):
(r, g, b, a) = emoji_img.split()
elif (emoji_img.mode == 'LA'):
(l, a) = emoji_img.split()
else:
emoji_img = emoji_img.convert('RGBA')
(r, g, b, a) = emoji_img.split()
self.img.paste(emoji_img, (self.x, (self.y + self.emoji_offset)), mask=a)
return
|
def draw_emoji(self):
'\n 绘制emoji\n Draw emoji\n '
emoji_img = self.get_emoji_img()
if (emoji_img is None):
self.x -= self.font_size
return
if (self.font_size != EMOJI_IMG_SIZE):
emoji_img = emoji_img.resize((self.font_size, self.font_size), Image.ANTIALIAS)
if (emoji_img.mode == 'RGBA'):
(r, g, b, a) = emoji_img.split()
elif (emoji_img.mode == 'LA'):
(l, a) = emoji_img.split()
else:
emoji_img = emoji_img.convert('RGBA')
(r, g, b, a) = emoji_img.split()
self.img.paste(emoji_img, (self.x, (self.y + self.emoji_offset)), mask=a)
return<|docstring|>绘制emoji
Draw emoji<|endoftext|>
|
2fe76981bfa71e6f4e0b41b059d79c2b236ad61d0522462197e049e7381b1dba
|
def combine_img(self):
'\n 合并图片\n Merge image\n '
img_top = self.make_blank_img(img_width=self.img_width, img_height=self.margin_top)
self.img_list.insert(0, img_top)
img_bottom = self.make_blank_img(img_width=self.img_width, img_height=self.margin_bottom)
self.img_list.append(img_bottom)
background_height = ZERO
y = ZERO
for img in self.img_list:
background_height += img.size[1]
background_img = self.make_blank_img(img_width=self.img_width, img_height=background_height)
for img in self.img_list:
if (self.background_color_mode == RGB):
background_img.paste(img, (ZERO, y))
y += img.size[1]
elif (self.background_color_mode == RGBA):
(r, g, b, a) = img.split()
background_img.paste(img, (ZERO, y), mask=a)
y += img.size[1]
return background_img
|
合并图片
Merge image
|
emoji2pic/main.py
|
combine_img
|
sniper-py/emoji2pic-python
| 7
|
python
|
def combine_img(self):
'\n 合并图片\n Merge image\n '
img_top = self.make_blank_img(img_width=self.img_width, img_height=self.margin_top)
self.img_list.insert(0, img_top)
img_bottom = self.make_blank_img(img_width=self.img_width, img_height=self.margin_bottom)
self.img_list.append(img_bottom)
background_height = ZERO
y = ZERO
for img in self.img_list:
background_height += img.size[1]
background_img = self.make_blank_img(img_width=self.img_width, img_height=background_height)
for img in self.img_list:
if (self.background_color_mode == RGB):
background_img.paste(img, (ZERO, y))
y += img.size[1]
elif (self.background_color_mode == RGBA):
(r, g, b, a) = img.split()
background_img.paste(img, (ZERO, y), mask=a)
y += img.size[1]
return background_img
|
def combine_img(self):
'\n 合并图片\n Merge image\n '
img_top = self.make_blank_img(img_width=self.img_width, img_height=self.margin_top)
self.img_list.insert(0, img_top)
img_bottom = self.make_blank_img(img_width=self.img_width, img_height=self.margin_bottom)
self.img_list.append(img_bottom)
background_height = ZERO
y = ZERO
for img in self.img_list:
background_height += img.size[1]
background_img = self.make_blank_img(img_width=self.img_width, img_height=background_height)
for img in self.img_list:
if (self.background_color_mode == RGB):
background_img.paste(img, (ZERO, y))
y += img.size[1]
elif (self.background_color_mode == RGBA):
(r, g, b, a) = img.split()
background_img.paste(img, (ZERO, y), mask=a)
y += img.size[1]
return background_img<|docstring|>合并图片
Merge image<|endoftext|>
|
79ef889818942d5c12a12d9bd96ccfab54ef4fbe1735e65881bd4f97ba0c87f1
|
def make_img(self):
'\n Main program\n '
self.split_paragraph()
self.draw_text()
return self.combine_img()
|
Main program
|
emoji2pic/main.py
|
make_img
|
sniper-py/emoji2pic-python
| 7
|
python
|
def make_img(self):
'\n \n '
self.split_paragraph()
self.draw_text()
return self.combine_img()
|
def make_img(self):
'\n \n '
self.split_paragraph()
self.draw_text()
return self.combine_img()<|docstring|>Main program<|endoftext|>
|
676bee5544ffe1631451748721ac64bbab0e441491a0c5c2c281c54ac19f32b4
|
def get_transit_connections(gtfs, start_time_ut, end_time_ut):
'\n Parameters\n ----------\n gtfs: gtfspy.GTFS\n end_time_ut: int\n start_time_ut: int\n\n Returns\n -------\n list[Connection]\n '
if ((start_time_ut + (20 * 3600)) < end_time_ut):
warn("Note that it is possible that same trip_I's can take place during multiple days, which could (potentially) affect the outcomes of the CSA routing!")
assert isinstance(gtfs, GTFS)
events_df = temporal_network(gtfs, start_time_ut=start_time_ut, end_time_ut=end_time_ut)
assert isinstance(events_df, pandas.DataFrame)
return list(map((lambda e: Connection(e.from_stop_I, e.to_stop_I, e.dep_time_ut, e.arr_time_ut, e.trip_I, e.seq)), events_df.itertuples()))
|
Parameters
----------
gtfs: gtfspy.GTFS
end_time_ut: int
start_time_ut: int
Returns
-------
list[Connection]
|
gtfspy/routing/helpers.py
|
get_transit_connections
|
Leo-Ryu/gtfspy
| 118
|
python
|
def get_transit_connections(gtfs, start_time_ut, end_time_ut):
'\n Parameters\n ----------\n gtfs: gtfspy.GTFS\n end_time_ut: int\n start_time_ut: int\n\n Returns\n -------\n list[Connection]\n '
if ((start_time_ut + (20 * 3600)) < end_time_ut):
warn("Note that it is possible that same trip_I's can take place during multiple days, which could (potentially) affect the outcomes of the CSA routing!")
assert isinstance(gtfs, GTFS)
events_df = temporal_network(gtfs, start_time_ut=start_time_ut, end_time_ut=end_time_ut)
assert isinstance(events_df, pandas.DataFrame)
return list(map((lambda e: Connection(e.from_stop_I, e.to_stop_I, e.dep_time_ut, e.arr_time_ut, e.trip_I, e.seq)), events_df.itertuples()))
|
def get_transit_connections(gtfs, start_time_ut, end_time_ut):
'\n Parameters\n ----------\n gtfs: gtfspy.GTFS\n end_time_ut: int\n start_time_ut: int\n\n Returns\n -------\n list[Connection]\n '
if ((start_time_ut + (20 * 3600)) < end_time_ut):
warn("Note that it is possible that same trip_I's can take place during multiple days, which could (potentially) affect the outcomes of the CSA routing!")
assert isinstance(gtfs, GTFS)
events_df = temporal_network(gtfs, start_time_ut=start_time_ut, end_time_ut=end_time_ut)
assert isinstance(events_df, pandas.DataFrame)
return list(map((lambda e: Connection(e.from_stop_I, e.to_stop_I, e.dep_time_ut, e.arr_time_ut, e.trip_I, e.seq)), events_df.itertuples()))<|docstring|>Parameters
----------
gtfs: gtfspy.GTFS
end_time_ut: int
start_time_ut: int
Returns
-------
list[Connection]<|endoftext|>
|
8fb21da49250f585ee1db228c3383c86ee5455de69ab984714f9f399cefd0bb7
|
def get_walk_network(gtfs, max_link_distance_m=1000):
'\n Parameters\n ----------\n gtfs: gtfspy.GTFS\n\n Returns\n -------\n walk_network: networkx.Graph:\n '
assert isinstance(gtfs, GTFS)
return walk_transfer_stop_to_stop_network(gtfs, max_link_distance=max_link_distance_m)
|
Parameters
----------
gtfs: gtfspy.GTFS
Returns
-------
walk_network: networkx.Graph:
|
gtfspy/routing/helpers.py
|
get_walk_network
|
Leo-Ryu/gtfspy
| 118
|
python
|
def get_walk_network(gtfs, max_link_distance_m=1000):
'\n Parameters\n ----------\n gtfs: gtfspy.GTFS\n\n Returns\n -------\n walk_network: networkx.Graph:\n '
assert isinstance(gtfs, GTFS)
return walk_transfer_stop_to_stop_network(gtfs, max_link_distance=max_link_distance_m)
|
def get_walk_network(gtfs, max_link_distance_m=1000):
'\n Parameters\n ----------\n gtfs: gtfspy.GTFS\n\n Returns\n -------\n walk_network: networkx.Graph:\n '
assert isinstance(gtfs, GTFS)
return walk_transfer_stop_to_stop_network(gtfs, max_link_distance=max_link_distance_m)<|docstring|>Parameters
----------
gtfs: gtfspy.GTFS
Returns
-------
walk_network: networkx.Graph:<|endoftext|>
|
e33bf971b4278c136d521fa65d6847e0c8628d0afde6bbc9d2a30f8ca61b07cb
|
def get_version():
'Returns the version as a string.'
return '.'.join(map(str, VERSION))
|
Returns the version as a string.
|
djangomaat/__init__.py
|
get_version
|
yliharma/django-maat
| 0
|
python
|
def get_version():
return '.'.join(map(str, VERSION))
|
def get_version():
return '.'.join(map(str, VERSION))<|docstring|>Returns the version as a string.<|endoftext|>
|
1b5478467d77a2f5c44246f533f4679ece98f1c896d5729a9a601ac7f329d67e
|
@action(methods=['PATCH'], detail=False)
def refresh(self, request, *args, **kwargs):
'\n API endpoint that allows Actuator data to be refreshed\n '
instance = self.get_object()
valid_refresh = ['all', 'info', 'schema']
refresh = bleach.clean(kwargs.get('refresh', 'info'))
if (instance is not None):
if (refresh not in valid_refresh):
refresh = 'info'
return Response({'refresh': refresh})
|
API endpoint that allows Actuator data to be refreshed
|
orchestrator/core/orc_server/actuator/views/viewsets.py
|
refresh
|
oasis-open/openc2-iacd
| 2
|
python
|
@action(methods=['PATCH'], detail=False)
def refresh(self, request, *args, **kwargs):
'\n \n '
instance = self.get_object()
valid_refresh = ['all', 'info', 'schema']
refresh = bleach.clean(kwargs.get('refresh', 'info'))
if (instance is not None):
if (refresh not in valid_refresh):
refresh = 'info'
return Response({'refresh': refresh})
|
@action(methods=['PATCH'], detail=False)
def refresh(self, request, *args, **kwargs):
'\n \n '
instance = self.get_object()
valid_refresh = ['all', 'info', 'schema']
refresh = bleach.clean(kwargs.get('refresh', 'info'))
if (instance is not None):
if (refresh not in valid_refresh):
refresh = 'info'
return Response({'refresh': refresh})<|docstring|>API endpoint that allows Actuator data to be refreshed<|endoftext|>
|
01f6d7c7b719f67f3c1d4412b815208cdd6afcfacc50d4427185d25b82810ec9
|
@action(methods=['GET'], detail=False)
def profile(self, request, *args, **kwargs):
'\n API endpoint that allows for Actuator profile retrieval\n '
actuator = self.get_object()
if (not request.user.is_staff):
actuator_groups = [g.name for g in ActuatorGroup.objects.filter(actuator=actuator).filter(users__in=[request.user])]
if (len(actuator_groups) == 0):
raise PermissionDenied(detail='User not authorised to access actuator', code=401)
rtn = {'schema': actuator.schema}
return Response(rtn)
|
API endpoint that allows for Actuator profile retrieval
|
orchestrator/core/orc_server/actuator/views/viewsets.py
|
profile
|
oasis-open/openc2-iacd
| 2
|
python
|
@action(methods=['GET'], detail=False)
def profile(self, request, *args, **kwargs):
'\n \n '
actuator = self.get_object()
if (not request.user.is_staff):
actuator_groups = [g.name for g in ActuatorGroup.objects.filter(actuator=actuator).filter(users__in=[request.user])]
if (len(actuator_groups) == 0):
raise PermissionDenied(detail='User not authorised to access actuator', code=401)
rtn = {'schema': actuator.schema}
return Response(rtn)
|
@action(methods=['GET'], detail=False)
def profile(self, request, *args, **kwargs):
'\n \n '
actuator = self.get_object()
if (not request.user.is_staff):
actuator_groups = [g.name for g in ActuatorGroup.objects.filter(actuator=actuator).filter(users__in=[request.user])]
if (len(actuator_groups) == 0):
raise PermissionDenied(detail='User not authorised to access actuator', code=401)
rtn = {'schema': actuator.schema}
return Response(rtn)<|docstring|>API endpoint that allows for Actuator profile retrieval<|endoftext|>
|
8a1f65760ee006da753ff6b816042db446f4c4b3223f00e1161d8b33bca18a66
|
@action(methods=['GET'], detail=False)
def users(self, request, *args, **kwargs):
'\n API endpoint that allows for Actuator user retrieval\n '
actuator = self.get_object()
if (not request.user.is_staff):
actuator_groups = [g.name for g in ActuatorGroup.objects.filter(actuator=actuator).filter(users__in=[request.user])]
if (len(actuator_groups) == 0):
raise PermissionDenied(detail='User not authorised to access actuator', code=401)
group_users = [[u.username for u in ag.users.all()] for ag in ActuatorGroup.objects.filter(actuator=actuator)]
rtn = {'users': sum(group_users, [])}
return Response(rtn)
|
API endpoint that allows for Actuator user retrieval
|
orchestrator/core/orc_server/actuator/views/viewsets.py
|
users
|
oasis-open/openc2-iacd
| 2
|
python
|
@action(methods=['GET'], detail=False)
def users(self, request, *args, **kwargs):
'\n \n '
actuator = self.get_object()
if (not request.user.is_staff):
actuator_groups = [g.name for g in ActuatorGroup.objects.filter(actuator=actuator).filter(users__in=[request.user])]
if (len(actuator_groups) == 0):
raise PermissionDenied(detail='User not authorised to access actuator', code=401)
group_users = [[u.username for u in ag.users.all()] for ag in ActuatorGroup.objects.filter(actuator=actuator)]
rtn = {'users': sum(group_users, [])}
return Response(rtn)
|
@action(methods=['GET'], detail=False)
def users(self, request, *args, **kwargs):
'\n \n '
actuator = self.get_object()
if (not request.user.is_staff):
actuator_groups = [g.name for g in ActuatorGroup.objects.filter(actuator=actuator).filter(users__in=[request.user])]
if (len(actuator_groups) == 0):
raise PermissionDenied(detail='User not authorised to access actuator', code=401)
group_users = [[u.username for u in ag.users.all()] for ag in ActuatorGroup.objects.filter(actuator=actuator)]
rtn = {'users': sum(group_users, [])}
return Response(rtn)<|docstring|>API endpoint that allows for Actuator user retrieval<|endoftext|>
|
2b4e63a77c5da8bf0cbe2c864f587b1c631bf1598f2894a2bc77bfff7391c5ff
|
def get_grad_fn(agent, clip_grad, max_grad=100.0):
' monitor gradient for each sub-component'
params = [p for p in agent.parameters()]
def f():
grad_log = {}
for (n, m) in agent.named_children():
tot_grad = 0
for p in m.parameters():
if (p.grad is not None):
tot_grad += (p.grad.norm(2) ** 2)
tot_grad = (tot_grad ** (1 / 2))
grad_log[('grad_norm' + n)] = tot_grad.item()
grad_norm = clip_grad_norm_([p for p in params if p.requires_grad], clip_grad)
grad_norm = grad_norm.item()
if ((max_grad is not None) and (grad_norm >= max_grad)):
print('WARNING: Exploding Gradients {:.2f}'.format(grad_norm))
grad_norm = max_grad
grad_log['grad_norm'] = grad_norm
return grad_log
return f
|
monitor gradient for each sub-component
|
rl.py
|
get_grad_fn
|
IYIagnus/fast_abs_rl
| 636
|
python
|
def get_grad_fn(agent, clip_grad, max_grad=100.0):
' '
params = [p for p in agent.parameters()]
def f():
grad_log = {}
for (n, m) in agent.named_children():
tot_grad = 0
for p in m.parameters():
if (p.grad is not None):
tot_grad += (p.grad.norm(2) ** 2)
tot_grad = (tot_grad ** (1 / 2))
grad_log[('grad_norm' + n)] = tot_grad.item()
grad_norm = clip_grad_norm_([p for p in params if p.requires_grad], clip_grad)
grad_norm = grad_norm.item()
if ((max_grad is not None) and (grad_norm >= max_grad)):
print('WARNING: Exploding Gradients {:.2f}'.format(grad_norm))
grad_norm = max_grad
grad_log['grad_norm'] = grad_norm
return grad_log
return f
|
def get_grad_fn(agent, clip_grad, max_grad=100.0):
' '
params = [p for p in agent.parameters()]
def f():
grad_log = {}
for (n, m) in agent.named_children():
tot_grad = 0
for p in m.parameters():
if (p.grad is not None):
tot_grad += (p.grad.norm(2) ** 2)
tot_grad = (tot_grad ** (1 / 2))
grad_log[('grad_norm' + n)] = tot_grad.item()
grad_norm = clip_grad_norm_([p for p in params if p.requires_grad], clip_grad)
grad_norm = grad_norm.item()
if ((max_grad is not None) and (grad_norm >= max_grad)):
print('WARNING: Exploding Gradients {:.2f}'.format(grad_norm))
grad_norm = max_grad
grad_log['grad_norm'] = grad_norm
return grad_log
return f<|docstring|>monitor gradient for each sub-component<|endoftext|>
|
24336e353e7bd1b356a32044437cf9531ea74366373156ca435d7e4f16b71856
|
def chroot(path: Tuple[str], oldroot: Tuple[str], newroot: Tuple[str]) -> Tuple[str]:
"\n\tThis is an helper function for Mesh.clone, that given a path,\n\tif it starts with oldroot, it replaces it with newroot.\n\tIf the path is oldroot itself, it is not changed.\n\t\n\t>>> chroot(('a', 'b', 'c'), ('a', 'b'), ('x', 'y', 'z'))\n\t('x', 'y', 'z', 'c')\n\t>>> chroot(('k', 'y', 's'), (), ('u', 'r'))\n\t('u', 'r', 'k', 'y', 's')\n\t>>> chroot(('x', 'y', 'z'), ('x', 'y'), ())\n\t('z',)\n\t>>> chroot(('a', 'b'), ('a', 'b'), ('c', 'd'))\n\t('a', 'b')\n\t>>> chroot(('x', 'y', 'z'), ('a', 'b'), ('c', 'd'))\n\t('x', 'y', 'z')\n\t"
if ((path[:len(oldroot)] == oldroot) and (path != oldroot)):
return (newroot + path[len(oldroot):])
return path
|
This is an helper function for Mesh.clone, that given a path,
if it starts with oldroot, it replaces it with newroot.
If the path is oldroot itself, it is not changed.
>>> chroot(('a', 'b', 'c'), ('a', 'b'), ('x', 'y', 'z'))
('x', 'y', 'z', 'c')
>>> chroot(('k', 'y', 's'), (), ('u', 'r'))
('u', 'r', 'k', 'y', 's')
>>> chroot(('x', 'y', 'z'), ('x', 'y'), ())
('z',)
>>> chroot(('a', 'b'), ('a', 'b'), ('c', 'd'))
('a', 'b')
>>> chroot(('x', 'y', 'z'), ('a', 'b'), ('c', 'd'))
('x', 'y', 'z')
|
nylo/mesh.py
|
chroot
|
VeggeroNylo/nylo
| 18
|
python
|
def chroot(path: Tuple[str], oldroot: Tuple[str], newroot: Tuple[str]) -> Tuple[str]:
"\n\tThis is an helper function for Mesh.clone, that given a path,\n\tif it starts with oldroot, it replaces it with newroot.\n\tIf the path is oldroot itself, it is not changed.\n\t\n\t>>> chroot(('a', 'b', 'c'), ('a', 'b'), ('x', 'y', 'z'))\n\t('x', 'y', 'z', 'c')\n\t>>> chroot(('k', 'y', 's'), (), ('u', 'r'))\n\t('u', 'r', 'k', 'y', 's')\n\t>>> chroot(('x', 'y', 'z'), ('x', 'y'), ())\n\t('z',)\n\t>>> chroot(('a', 'b'), ('a', 'b'), ('c', 'd'))\n\t('a', 'b')\n\t>>> chroot(('x', 'y', 'z'), ('a', 'b'), ('c', 'd'))\n\t('x', 'y', 'z')\n\t"
if ((path[:len(oldroot)] == oldroot) and (path != oldroot)):
return (newroot + path[len(oldroot):])
return path
|
def chroot(path: Tuple[str], oldroot: Tuple[str], newroot: Tuple[str]) -> Tuple[str]:
"\n\tThis is an helper function for Mesh.clone, that given a path,\n\tif it starts with oldroot, it replaces it with newroot.\n\tIf the path is oldroot itself, it is not changed.\n\t\n\t>>> chroot(('a', 'b', 'c'), ('a', 'b'), ('x', 'y', 'z'))\n\t('x', 'y', 'z', 'c')\n\t>>> chroot(('k', 'y', 's'), (), ('u', 'r'))\n\t('u', 'r', 'k', 'y', 's')\n\t>>> chroot(('x', 'y', 'z'), ('x', 'y'), ())\n\t('z',)\n\t>>> chroot(('a', 'b'), ('a', 'b'), ('c', 'd'))\n\t('a', 'b')\n\t>>> chroot(('x', 'y', 'z'), ('a', 'b'), ('c', 'd'))\n\t('x', 'y', 'z')\n\t"
if ((path[:len(oldroot)] == oldroot) and (path != oldroot)):
return (newroot + path[len(oldroot):])
return path<|docstring|>This is an helper function for Mesh.clone, that given a path,
if it starts with oldroot, it replaces it with newroot.
If the path is oldroot itself, it is not changed.
>>> chroot(('a', 'b', 'c'), ('a', 'b'), ('x', 'y', 'z'))
('x', 'y', 'z', 'c')
>>> chroot(('k', 'y', 's'), (), ('u', 'r'))
('u', 'r', 'k', 'y', 's')
>>> chroot(('x', 'y', 'z'), ('x', 'y'), ())
('z',)
>>> chroot(('a', 'b'), ('a', 'b'), ('c', 'd'))
('a', 'b')
>>> chroot(('x', 'y', 'z'), ('a', 'b'), ('c', 'd'))
('x', 'y', 'z')<|endoftext|>
|
d6a009e02d079bb96f13490820d14dbb65aa66df70c1f73200ba2eedd711689c
|
def bind(self):
"\n\t\tThis binds all the variable in the mesh.\n\t\tAt the beginning, all the variable to bind are put in\n\t\tthis notation: (context_path, referring_path). In order to \n\t\tdo this, it look for all the values (different from None),\n\t\tand takes the first element from the path, as that is\n\t\tthe variable we are referring to, and trying to get it\n\t\tproprieties.\n\t\te.g.: from `a.b.c` that becomes `('a', 'b', 'c')`, it takes\n\t\tjust `a`.\n\t\tNow, we search for that value inside the mesh, in order\n\t\tto bind it. If we had something like:\n\t\t`(candy: 1, or: (candy: 2, uh: candy))`\n\t\tThe `candy` variable (that is `('candy',)` ) is referring to\n\t\tthe 2 value and not the 1. This is because we start searching\n\t\tfrom the most inner to the outer. The starting path is given\n\t\tfrom the context, so if we have\n\t\t`(('x', 'y', 'z',), ('a', 'b'))`\n\t\tWe will search for, in this order:\n\t\t`('x', 'y', 'z', 'a')`\n\t\t`('x', 'y', 'a')`\n\t\t`('x', 'a')`\n\t\t`('a',)`\n\t\tIf none of this variable exists, an expection will be raised.\n\t\tA special case is same, the only built-in function.\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a',): None,\n\t\t... ('x',): (('x',), ('a',)),\n\t\t... ('k',): (('k',), ('same',))\n\t\t... })\n\t\t>>> m.bind()\n\t\t>>> m[('x',)]\n\t\t('a',)\n\t\t>>> m[('k',)]\n\t\t('same',)\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a', 'b', 'c'): None,\n\t\t... ('a', 'b', 'x'): (('a', 'b', 'x'), ('c', 'd'))\n\t\t... })\n\t\t>>> m.bind()\n\t\t>>> m[('a', 'b', 'x')]\n\t\t('a', 'b', 'c', 'd')\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a', 'f'): None,\n\t\t... ('x', 'y'): (('a', 'b', 'c', 'd'), ('f', 'y', 'i'))\n\t\t... })\n\t\t>>> m.bind()\n\t\t>>> m[('x', 'y')]\n\t\t('a', 'f', 'y', 'i')\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a', 'p'): None,\n\t\t... ('x', 'y'): (('a', 'b', 'c', 'd'), ('f', 'y', 'i'))\n\t\t... })\n\t\t>>> m.bind()\n\t\tTraceback (most recent call last):\n\t\t\t...\n\t\tSyntaxError: Name 'f' is not defined.\n\t\t"
self[('same',)] = None
for (key, value) in self.items():
if (value is None):
continue
(context, (var, *propr)) = value
for i in reversed(range((len(context) + 1))):
possible = (context[:i] + (var,))
if (possible in self):
self[key] = (possible + tuple(propr))
break
else:
raise SyntaxError(f'Name {var!r} is not defined in {key!r}.')
|
This binds all the variable in the mesh.
At the beginning, all the variable to bind are put in
this notation: (context_path, referring_path). In order to
do this, it look for all the values (different from None),
and takes the first element from the path, as that is
the variable we are referring to, and trying to get it
proprieties.
e.g.: from `a.b.c` that becomes `('a', 'b', 'c')`, it takes
just `a`.
Now, we search for that value inside the mesh, in order
to bind it. If we had something like:
`(candy: 1, or: (candy: 2, uh: candy))`
The `candy` variable (that is `('candy',)` ) is referring to
the 2 value and not the 1. This is because we start searching
from the most inner to the outer. The starting path is given
from the context, so if we have
`(('x', 'y', 'z',), ('a', 'b'))`
We will search for, in this order:
`('x', 'y', 'z', 'a')`
`('x', 'y', 'a')`
`('x', 'a')`
`('a',)`
If none of this variable exists, an expection will be raised.
A special case is same, the only built-in function.
>>> m = Mesh({
... ('a',): None,
... ('x',): (('x',), ('a',)),
... ('k',): (('k',), ('same',))
... })
>>> m.bind()
>>> m[('x',)]
('a',)
>>> m[('k',)]
('same',)
>>> m = Mesh({
... ('a', 'b', 'c'): None,
... ('a', 'b', 'x'): (('a', 'b', 'x'), ('c', 'd'))
... })
>>> m.bind()
>>> m[('a', 'b', 'x')]
('a', 'b', 'c', 'd')
>>> m = Mesh({
... ('a', 'f'): None,
... ('x', 'y'): (('a', 'b', 'c', 'd'), ('f', 'y', 'i'))
... })
>>> m.bind()
>>> m[('x', 'y')]
('a', 'f', 'y', 'i')
>>> m = Mesh({
... ('a', 'p'): None,
... ('x', 'y'): (('a', 'b', 'c', 'd'), ('f', 'y', 'i'))
... })
>>> m.bind()
Traceback (most recent call last):
...
SyntaxError: Name 'f' is not defined.
|
nylo/mesh.py
|
bind
|
VeggeroNylo/nylo
| 18
|
python
|
def bind(self):
"\n\t\tThis binds all the variable in the mesh.\n\t\tAt the beginning, all the variable to bind are put in\n\t\tthis notation: (context_path, referring_path). In order to \n\t\tdo this, it look for all the values (different from None),\n\t\tand takes the first element from the path, as that is\n\t\tthe variable we are referring to, and trying to get it\n\t\tproprieties.\n\t\te.g.: from `a.b.c` that becomes `('a', 'b', 'c')`, it takes\n\t\tjust `a`.\n\t\tNow, we search for that value inside the mesh, in order\n\t\tto bind it. If we had something like:\n\t\t`(candy: 1, or: (candy: 2, uh: candy))`\n\t\tThe `candy` variable (that is `('candy',)` ) is referring to\n\t\tthe 2 value and not the 1. This is because we start searching\n\t\tfrom the most inner to the outer. The starting path is given\n\t\tfrom the context, so if we have\n\t\t`(('x', 'y', 'z',), ('a', 'b'))`\n\t\tWe will search for, in this order:\n\t\t`('x', 'y', 'z', 'a')`\n\t\t`('x', 'y', 'a')`\n\t\t`('x', 'a')`\n\t\t`('a',)`\n\t\tIf none of this variable exists, an expection will be raised.\n\t\tA special case is same, the only built-in function.\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a',): None,\n\t\t... ('x',): (('x',), ('a',)),\n\t\t... ('k',): (('k',), ('same',))\n\t\t... })\n\t\t>>> m.bind()\n\t\t>>> m[('x',)]\n\t\t('a',)\n\t\t>>> m[('k',)]\n\t\t('same',)\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a', 'b', 'c'): None,\n\t\t... ('a', 'b', 'x'): (('a', 'b', 'x'), ('c', 'd'))\n\t\t... })\n\t\t>>> m.bind()\n\t\t>>> m[('a', 'b', 'x')]\n\t\t('a', 'b', 'c', 'd')\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a', 'f'): None,\n\t\t... ('x', 'y'): (('a', 'b', 'c', 'd'), ('f', 'y', 'i'))\n\t\t... })\n\t\t>>> m.bind()\n\t\t>>> m[('x', 'y')]\n\t\t('a', 'f', 'y', 'i')\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a', 'p'): None,\n\t\t... ('x', 'y'): (('a', 'b', 'c', 'd'), ('f', 'y', 'i'))\n\t\t... })\n\t\t>>> m.bind()\n\t\tTraceback (most recent call last):\n\t\t\t...\n\t\tSyntaxError: Name 'f' is not defined.\n\t\t"
self[('same',)] = None
for (key, value) in self.items():
if (value is None):
continue
(context, (var, *propr)) = value
for i in reversed(range((len(context) + 1))):
possible = (context[:i] + (var,))
if (possible in self):
self[key] = (possible + tuple(propr))
break
else:
raise SyntaxError(f'Name {var!r} is not defined in {key!r}.')
|
def bind(self):
"\n\t\tThis binds all the variable in the mesh.\n\t\tAt the beginning, all the variable to bind are put in\n\t\tthis notation: (context_path, referring_path). In order to \n\t\tdo this, it look for all the values (different from None),\n\t\tand takes the first element from the path, as that is\n\t\tthe variable we are referring to, and trying to get it\n\t\tproprieties.\n\t\te.g.: from `a.b.c` that becomes `('a', 'b', 'c')`, it takes\n\t\tjust `a`.\n\t\tNow, we search for that value inside the mesh, in order\n\t\tto bind it. If we had something like:\n\t\t`(candy: 1, or: (candy: 2, uh: candy))`\n\t\tThe `candy` variable (that is `('candy',)` ) is referring to\n\t\tthe 2 value and not the 1. This is because we start searching\n\t\tfrom the most inner to the outer. The starting path is given\n\t\tfrom the context, so if we have\n\t\t`(('x', 'y', 'z',), ('a', 'b'))`\n\t\tWe will search for, in this order:\n\t\t`('x', 'y', 'z', 'a')`\n\t\t`('x', 'y', 'a')`\n\t\t`('x', 'a')`\n\t\t`('a',)`\n\t\tIf none of this variable exists, an expection will be raised.\n\t\tA special case is same, the only built-in function.\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a',): None,\n\t\t... ('x',): (('x',), ('a',)),\n\t\t... ('k',): (('k',), ('same',))\n\t\t... })\n\t\t>>> m.bind()\n\t\t>>> m[('x',)]\n\t\t('a',)\n\t\t>>> m[('k',)]\n\t\t('same',)\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a', 'b', 'c'): None,\n\t\t... ('a', 'b', 'x'): (('a', 'b', 'x'), ('c', 'd'))\n\t\t... })\n\t\t>>> m.bind()\n\t\t>>> m[('a', 'b', 'x')]\n\t\t('a', 'b', 'c', 'd')\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a', 'f'): None,\n\t\t... ('x', 'y'): (('a', 'b', 'c', 'd'), ('f', 'y', 'i'))\n\t\t... })\n\t\t>>> m.bind()\n\t\t>>> m[('x', 'y')]\n\t\t('a', 'f', 'y', 'i')\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a', 'p'): None,\n\t\t... ('x', 'y'): (('a', 'b', 'c', 'd'), ('f', 'y', 'i'))\n\t\t... })\n\t\t>>> m.bind()\n\t\tTraceback (most recent call last):\n\t\t\t...\n\t\tSyntaxError: Name 'f' is not defined.\n\t\t"
self[('same',)] = None
for (key, value) in self.items():
if (value is None):
continue
(context, (var, *propr)) = value
for i in reversed(range((len(context) + 1))):
possible = (context[:i] + (var,))
if (possible in self):
self[key] = (possible + tuple(propr))
break
else:
raise SyntaxError(f'Name {var!r} is not defined in {key!r}.')<|docstring|>This binds all the variable in the mesh.
At the beginning, all the variable to bind are put in
this notation: (context_path, referring_path). In order to
do this, it look for all the values (different from None),
and takes the first element from the path, as that is
the variable we are referring to, and trying to get it
proprieties.
e.g.: from `a.b.c` that becomes `('a', 'b', 'c')`, it takes
just `a`.
Now, we search for that value inside the mesh, in order
to bind it. If we had something like:
`(candy: 1, or: (candy: 2, uh: candy))`
The `candy` variable (that is `('candy',)` ) is referring to
the 2 value and not the 1. This is because we start searching
from the most inner to the outer. The starting path is given
from the context, so if we have
`(('x', 'y', 'z',), ('a', 'b'))`
We will search for, in this order:
`('x', 'y', 'z', 'a')`
`('x', 'y', 'a')`
`('x', 'a')`
`('a',)`
If none of this variable exists, an expection will be raised.
A special case is same, the only built-in function.
>>> m = Mesh({
... ('a',): None,
... ('x',): (('x',), ('a',)),
... ('k',): (('k',), ('same',))
... })
>>> m.bind()
>>> m[('x',)]
('a',)
>>> m[('k',)]
('same',)
>>> m = Mesh({
... ('a', 'b', 'c'): None,
... ('a', 'b', 'x'): (('a', 'b', 'x'), ('c', 'd'))
... })
>>> m.bind()
>>> m[('a', 'b', 'x')]
('a', 'b', 'c', 'd')
>>> m = Mesh({
... ('a', 'f'): None,
... ('x', 'y'): (('a', 'b', 'c', 'd'), ('f', 'y', 'i'))
... })
>>> m.bind()
>>> m[('x', 'y')]
('a', 'f', 'y', 'i')
>>> m = Mesh({
... ('a', 'p'): None,
... ('x', 'y'): (('a', 'b', 'c', 'd'), ('f', 'y', 'i'))
... })
>>> m.bind()
Traceback (most recent call last):
...
SyntaxError: Name 'f' is not defined.<|endoftext|>
|
62170b43c9472a926ed2bdaf8cdb84d0a5ca16a3cf43ab1c4240923404117a7d
|
def valueof(self, path: Tuple[str], done=()):
"\n\t\tThis method returns the value of a path. The difference\n\t\tbetween this and the get method is that if the value\n\t\tis not in the dictionary, valueof will check if\n\t\tit's a propriety of another value. E.g., if you have\n\t\t`a.x`, maybe `a.x` is not in the dictionary, but\n\t\tmaybe `x` is defined the `a` class.\n\t\tIn order to do this, if the path (Tuple[str]) is\n\t\tnot in the dictionary, it will remove the last\n\t\telements until it finds a value that exists and\n\t\tis different from None, e.g.:\n\t\t`('a', 'b', 'c')`, then `('a', 'b')`, then `('a',)` then `()`.\n\t\tIf found, it will call the chroot method, in order\n\t\tto transfer the proprieties from the object the found\n\t\tpath is referring to to the found path itself. E.g.,\n\t\tif `('a', 'b')` is a path that refers to `('fib',)`\n\t\t`Mesh.chroot(('fib',), ('a', 'b'))` will be called.\n\t\tIf even after the chroot the value still does not exist,\n\t\tit will go on, raising an error after `()`.\n\t\tAlso, if the value to return is a path to another\n\t\tobject, it will return the Mesh.valueof(that_path) instead.\n\t\tFinally, if the value is None, the path itself is returned.\n\t\tThe done argument represent the already cloned values, in\n\t\torder to avoid them cloning forever.\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a',): None,\n\t\t... ('a', 'k'): None,\n\t\t... ('a', 'k', 'x'): None,\n\t\t... ('b',): ('a',),\n\t\t... ('c',): ('b', 'k'),\n\t\t... ('e',): ('b',),\n\t\t... ('f',): ('e',),\n\t\t... ('g',): ('f', 'k')\n\t\t... 
})\n\t\t>>> m.valueof(('a',))\n\t\t('a',)\n\t\t>>> m.valueof(('a', 'k'))\n\t\t('a', 'k')\n\t\t>>> m.valueof(('b',))\n\t\t('a',)\n\t\t>>> m.valueof(('c',))\n\t\t('b', 'k')\n\t\t>>> m.valueof(('e',))\n\t\t('a',)\n\t\t>>> m.valueof(('d',))\n\t\tTraceback (most recent call last):\n\t\t\t...\n\t\tSyntaxError: Name 'd' is not defined.\n\t\t>>> m.valueof(('b', 'n'))\n\t\tTraceback (most recent call last):\n\t\t\t...\n\t\tSyntaxError: Name 'b.n' is not defined.\n\t\t>>> m.valueof(('g', 'x'))\n\t\t('g', 'x')\n\t\t>>> m.valueof(('g',))\n\t\t('f', 'k')\n\t\t>>> m.valueof(('f', 'k', 'x'))\n\t\t('f', 'k', 'x')\n\t\t"
if (path in self):
if isinstance(self[path], tuple):
return self.valueof(self[path])
assert (self[path] is None)
return path
for i in reversed(range(len(path))):
subpath = path[:i]
if ((not (subpath in self)) or (self[subpath] is None)):
continue
if ((self[subpath], subpath) in done):
continue
oldvalue = self[subpath]
done += ((oldvalue, subpath),)
self.clone(self[subpath], subpath, done)
return self.valueof(path, done)
raise SyntaxError(f'Name {path!r} is not defined.')
|
This method returns the value of a path. The difference
between this and the get method is that if the value
is not in the dictionary, valueof will check if
it's a propriety of another value. E.g., if you have
`a.x`, maybe `a.x` is not in the dictionary, but
maybe `x` is defined the `a` class.
In order to do this, if the path (Tuple[str]) is
not in the dictionary, it will remove the last
elements until it finds a value that exists and
is different from None, e.g.:
`('a', 'b', 'c')`, then `('a', 'b')`, then `('a',)` then `()`.
If found, it will call the chroot method, in order
to transfer the proprieties from the object the found
path is referring to to the found path itself. E.g.,
if `('a', 'b')` is a path that refers to `('fib',)`
`Mesh.chroot(('fib',), ('a', 'b'))` will be called.
If even after the chroot the value still does not exist,
it will go on, raising an error after `()`.
Also, if the value to return is a path to another
object, it will return the Mesh.valueof(that_path) instead.
Finally, if the value is None, the path itself is returned.
The done argument represent the already cloned values, in
order to avoid them cloning forever.
>>> m = Mesh({
... ('a',): None,
... ('a', 'k'): None,
... ('a', 'k', 'x'): None,
... ('b',): ('a',),
... ('c',): ('b', 'k'),
... ('e',): ('b',),
... ('f',): ('e',),
... ('g',): ('f', 'k')
... })
>>> m.valueof(('a',))
('a',)
>>> m.valueof(('a', 'k'))
('a', 'k')
>>> m.valueof(('b',))
('a',)
>>> m.valueof(('c',))
('b', 'k')
>>> m.valueof(('e',))
('a',)
>>> m.valueof(('d',))
Traceback (most recent call last):
...
SyntaxError: Name 'd' is not defined.
>>> m.valueof(('b', 'n'))
Traceback (most recent call last):
...
SyntaxError: Name 'b.n' is not defined.
>>> m.valueof(('g', 'x'))
('g', 'x')
>>> m.valueof(('g',))
('f', 'k')
>>> m.valueof(('f', 'k', 'x'))
('f', 'k', 'x')
|
nylo/mesh.py
|
valueof
|
VeggeroNylo/nylo
| 18
|
python
|
def valueof(self, path: Tuple[str], done=()):
"\n\t\tThis method returns the value of a path. The difference\n\t\tbetween this and the get method is that if the value\n\t\tis not in the dictionary, valueof will check if\n\t\tit's a propriety of another value. E.g., if you have\n\t\t`a.x`, maybe `a.x` is not in the dictionary, but\n\t\tmaybe `x` is defined the `a` class.\n\t\tIn order to do this, if the path (Tuple[str]) is\n\t\tnot in the dictionary, it will remove the last\n\t\telements until it finds a value that exists and\n\t\tis different from None, e.g.:\n\t\t`('a', 'b', 'c')`, then `('a', 'b')`, then `('a',)` then `()`.\n\t\tIf found, it will call the chroot method, in order\n\t\tto transfer the proprieties from the object the found\n\t\tpath is referring to to the found path itself. E.g.,\n\t\tif `('a', 'b')` is a path that refers to `('fib',)`\n\t\t`Mesh.chroot(('fib',), ('a', 'b'))` will be called.\n\t\tIf even after the chroot the value still does not exist,\n\t\tit will go on, raising an error after `()`.\n\t\tAlso, if the value to return is a path to another\n\t\tobject, it will return the Mesh.valueof(that_path) instead.\n\t\tFinally, if the value is None, the path itself is returned.\n\t\tThe done argument represent the already cloned values, in\n\t\torder to avoid them cloning forever.\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a',): None,\n\t\t... ('a', 'k'): None,\n\t\t... ('a', 'k', 'x'): None,\n\t\t... ('b',): ('a',),\n\t\t... ('c',): ('b', 'k'),\n\t\t... ('e',): ('b',),\n\t\t... ('f',): ('e',),\n\t\t... ('g',): ('f', 'k')\n\t\t... 
})\n\t\t>>> m.valueof(('a',))\n\t\t('a',)\n\t\t>>> m.valueof(('a', 'k'))\n\t\t('a', 'k')\n\t\t>>> m.valueof(('b',))\n\t\t('a',)\n\t\t>>> m.valueof(('c',))\n\t\t('b', 'k')\n\t\t>>> m.valueof(('e',))\n\t\t('a',)\n\t\t>>> m.valueof(('d',))\n\t\tTraceback (most recent call last):\n\t\t\t...\n\t\tSyntaxError: Name 'd' is not defined.\n\t\t>>> m.valueof(('b', 'n'))\n\t\tTraceback (most recent call last):\n\t\t\t...\n\t\tSyntaxError: Name 'b.n' is not defined.\n\t\t>>> m.valueof(('g', 'x'))\n\t\t('g', 'x')\n\t\t>>> m.valueof(('g',))\n\t\t('f', 'k')\n\t\t>>> m.valueof(('f', 'k', 'x'))\n\t\t('f', 'k', 'x')\n\t\t"
if (path in self):
if isinstance(self[path], tuple):
return self.valueof(self[path])
assert (self[path] is None)
return path
for i in reversed(range(len(path))):
subpath = path[:i]
if ((not (subpath in self)) or (self[subpath] is None)):
continue
if ((self[subpath], subpath) in done):
continue
oldvalue = self[subpath]
done += ((oldvalue, subpath),)
self.clone(self[subpath], subpath, done)
return self.valueof(path, done)
raise SyntaxError(f'Name {path!r} is not defined.')
|
def valueof(self, path: Tuple[str], done=()):
"\n\t\tThis method returns the value of a path. The difference\n\t\tbetween this and the get method is that if the value\n\t\tis not in the dictionary, valueof will check if\n\t\tit's a propriety of another value. E.g., if you have\n\t\t`a.x`, maybe `a.x` is not in the dictionary, but\n\t\tmaybe `x` is defined the `a` class.\n\t\tIn order to do this, if the path (Tuple[str]) is\n\t\tnot in the dictionary, it will remove the last\n\t\telements until it finds a value that exists and\n\t\tis different from None, e.g.:\n\t\t`('a', 'b', 'c')`, then `('a', 'b')`, then `('a',)` then `()`.\n\t\tIf found, it will call the chroot method, in order\n\t\tto transfer the proprieties from the object the found\n\t\tpath is referring to to the found path itself. E.g.,\n\t\tif `('a', 'b')` is a path that refers to `('fib',)`\n\t\t`Mesh.chroot(('fib',), ('a', 'b'))` will be called.\n\t\tIf even after the chroot the value still does not exist,\n\t\tit will go on, raising an error after `()`.\n\t\tAlso, if the value to return is a path to another\n\t\tobject, it will return the Mesh.valueof(that_path) instead.\n\t\tFinally, if the value is None, the path itself is returned.\n\t\tThe done argument represent the already cloned values, in\n\t\torder to avoid them cloning forever.\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('a',): None,\n\t\t... ('a', 'k'): None,\n\t\t... ('a', 'k', 'x'): None,\n\t\t... ('b',): ('a',),\n\t\t... ('c',): ('b', 'k'),\n\t\t... ('e',): ('b',),\n\t\t... ('f',): ('e',),\n\t\t... ('g',): ('f', 'k')\n\t\t... 
})\n\t\t>>> m.valueof(('a',))\n\t\t('a',)\n\t\t>>> m.valueof(('a', 'k'))\n\t\t('a', 'k')\n\t\t>>> m.valueof(('b',))\n\t\t('a',)\n\t\t>>> m.valueof(('c',))\n\t\t('b', 'k')\n\t\t>>> m.valueof(('e',))\n\t\t('a',)\n\t\t>>> m.valueof(('d',))\n\t\tTraceback (most recent call last):\n\t\t\t...\n\t\tSyntaxError: Name 'd' is not defined.\n\t\t>>> m.valueof(('b', 'n'))\n\t\tTraceback (most recent call last):\n\t\t\t...\n\t\tSyntaxError: Name 'b.n' is not defined.\n\t\t>>> m.valueof(('g', 'x'))\n\t\t('g', 'x')\n\t\t>>> m.valueof(('g',))\n\t\t('f', 'k')\n\t\t>>> m.valueof(('f', 'k', 'x'))\n\t\t('f', 'k', 'x')\n\t\t"
if (path in self):
if isinstance(self[path], tuple):
return self.valueof(self[path])
assert (self[path] is None)
return path
for i in reversed(range(len(path))):
subpath = path[:i]
if ((not (subpath in self)) or (self[subpath] is None)):
continue
if ((self[subpath], subpath) in done):
continue
oldvalue = self[subpath]
done += ((oldvalue, subpath),)
self.clone(self[subpath], subpath, done)
return self.valueof(path, done)
raise SyntaxError(f'Name {path!r} is not defined.')<|docstring|>This method returns the value of a path. The difference
between this and the get method is that if the value
is not in the dictionary, valueof will check if
it's a propriety of another value. E.g., if you have
`a.x`, maybe `a.x` is not in the dictionary, but
maybe `x` is defined the `a` class.
In order to do this, if the path (Tuple[str]) is
not in the dictionary, it will remove the last
elements until it finds a value that exists and
is different from None, e.g.:
`('a', 'b', 'c')`, then `('a', 'b')`, then `('a',)` then `()`.
If found, it will call the chroot method, in order
to transfer the proprieties from the object the found
path is referring to to the found path itself. E.g.,
if `('a', 'b')` is a path that refers to `('fib',)`
`Mesh.chroot(('fib',), ('a', 'b'))` will be called.
If even after the chroot the value still does not exist,
it will go on, raising an error after `()`.
Also, if the value to return is a path to another
object, it will return the Mesh.valueof(that_path) instead.
Finally, if the value is None, the path itself is returned.
The done argument represent the already cloned values, in
order to avoid them cloning forever.
>>> m = Mesh({
... ('a',): None,
... ('a', 'k'): None,
... ('a', 'k', 'x'): None,
... ('b',): ('a',),
... ('c',): ('b', 'k'),
... ('e',): ('b',),
... ('f',): ('e',),
... ('g',): ('f', 'k')
... })
>>> m.valueof(('a',))
('a',)
>>> m.valueof(('a', 'k'))
('a', 'k')
>>> m.valueof(('b',))
('a',)
>>> m.valueof(('c',))
('b', 'k')
>>> m.valueof(('e',))
('a',)
>>> m.valueof(('d',))
Traceback (most recent call last):
...
SyntaxError: Name 'd' is not defined.
>>> m.valueof(('b', 'n'))
Traceback (most recent call last):
...
SyntaxError: Name 'b.n' is not defined.
>>> m.valueof(('g', 'x'))
('g', 'x')
>>> m.valueof(('g',))
('f', 'k')
>>> m.valueof(('f', 'k', 'x'))
('f', 'k', 'x')<|endoftext|>
|
3427510dc914bf11ecb907b45d73ac9a8a2b7c0395041eaa3cbab64647cc284e
|
def clone(self, oldroot: Tuple[str], newroot: Tuple[str], done=()):
"\n\t\tThis function clones all the values in the dictionary\n\t\twhere the keys starts with oldroot to the same\n\t\tpath but with oldroot replaced with newroot, also\n\t\tchanging the root in the value if it is a path.\n\t\tThere are a couple of exception: \n\t\t- If the path does not start with oldroot but it *is* \n\t\toldroot itself, it is cloned to newpath only if the\n\t\tvalue is not None.\n\t\t- If the value of a path does not start with oldroot\n\t\tbut it *is* oldroot itself, it is not changed.\n\t\t- If the path ends with ('self',), and the value is\n\t\toldroot itself, the values is changed to newpath.\n\t\t- If the path, after changing the oldroot with the newroot,\n\t\talready exists and is not None, that value is not cloned,\n\t\tand the old one is preserved.\n\t\t\n\t\t>>> m = Mesh({\n\t\t...\t ('fib', 'n'): ('nat',),\n\t\t... ('fib', 'prev'): ('fib', 'n'),\n\t\t... ('fib',): ('fib', 'prev'),\n\t\t... ('fib', 'self'): ('fib',),\n\t\t... ('fib', 'call'): ('fib',),\n\t\t... ('fib', 'none'): None,\n\t\t...\n\t\t... ('tgt',): ('fib',),\n\t\t... ('tgt', 'n'): ('k',)\n\t\t... })\n\t\t>>> m.clone(('fib',), ('tgt',))\n\t\t>>> m[('tgt',)]\n\t\t('fib', 'prev')\n\t\t>>> m[('tgt', 'n')]\n\t\t('k',)\n\t\t>>> m[('tgt', 'prev')]\n\t\t('tgt', 'n')\n\t\t>>> m[('tgt', 'self')]\n\t\t('tgt',)\n\t\t>>> m[('tgt', 'call')]\n\t\t('fib',)\n\t\t>>> m[('tgt', 'none')] #(None\t)\n\t\t\n\t\t>>> m.clone(('fib', 'none'), ('tgt',))\n\t\t>>> m[('tgt',)]\n\t\t('fib', 'prev')\n\t\t\n\t\tA special case is cloning from ('same'). That is the\n\t\tonly built-in function. When cloning from it, this function\n\t\twill check if the Mesh.valueof(newpath+('first',)) is the\n\t\tsame of newpath+('second',). If so, ('same', 'self') will have\n\t\tvalue ('same', 'then'), else ('same', 'else').\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('same',): None,\n\t\t... ('success',): None,\n\t\t... ('fail',): None,\n\t\t... ('c',): None,\n\t\t... ('b',): ('c',),\n\t\t... ('a',): ('same',),\n\t\t... 
('a', 'first'): ('b',),\n\t\t... ('a', 'second'): ('c',),\n\t\t... ('a', 'then'): ('success',),\n\t\t... ('a', 'else'): ('fail',)\n\t\t... })\n\t\t>>> m.clone(('same',), ('a',))\n\t\t>>> m[('a', 'self')]\n\t\t('a', 'then')\n\t\t>>> m.valueof(('a', 'self'))\n\t\t('success',)\n\t\t"
delta = {}
selfpath = (oldroot + ('self',))
if (not (oldroot in self)):
self.valueof(oldroot, done)
blockeds = set()
for (key, value) in sorted(self.items(), key=(lambda x: x[0])):
newkey = chroot(key, oldroot, newroot)
if any(((key[:len(b)] == b) for b in blockeds)):
continue
if (newkey == key):
continue
if (not ((newkey in self) and (self[newkey] is not None))):
newval = (chroot(value, oldroot, newroot) if (not (value is None)) else None)
delta[newkey] = newval
else:
blockeds.add(key)
if ((oldroot in self) and self[oldroot]):
delta[newroot] = self[oldroot]
if (oldroot == ('same',)):
delta[(newroot + ('self',))] = (newroot + (('then',) if (self.valueof((newroot + ('first',)), done) == self.valueof((newroot + ('second',)), done)) else ('else',)))
if ((selfpath in self) and (self[selfpath] == oldroot)):
delta[(newroot + ('self',))] = newroot
self.update(delta)
|
This function clones all the values in the dictionary
where the keys starts with oldroot to the same
path but with oldroot replaced with newroot, also
changing the root in the value if it is a path.
There are a couple of exception:
- If the path does not start with oldroot but it *is*
oldroot itself, it is cloned to newpath only if the
value is not None.
- If the value of a path does not start with oldroot
but it *is* oldroot itself, it is not changed.
- If the path ends with ('self',), and the value is
oldroot itself, the values is changed to newpath.
- If the path, after changing the oldroot with the newroot,
already exists and is not None, that value is not cloned,
and the old one is preserved.
>>> m = Mesh({
... ('fib', 'n'): ('nat',),
... ('fib', 'prev'): ('fib', 'n'),
... ('fib',): ('fib', 'prev'),
... ('fib', 'self'): ('fib',),
... ('fib', 'call'): ('fib',),
... ('fib', 'none'): None,
...
... ('tgt',): ('fib',),
... ('tgt', 'n'): ('k',)
... })
>>> m.clone(('fib',), ('tgt',))
>>> m[('tgt',)]
('fib', 'prev')
>>> m[('tgt', 'n')]
('k',)
>>> m[('tgt', 'prev')]
('tgt', 'n')
>>> m[('tgt', 'self')]
('tgt',)
>>> m[('tgt', 'call')]
('fib',)
>>> m[('tgt', 'none')] #(None )
>>> m.clone(('fib', 'none'), ('tgt',))
>>> m[('tgt',)]
('fib', 'prev')
A special case is cloning from ('same'). That is the
only built-in function. When cloning from it, this function
will check if the Mesh.valueof(newpath+('first',)) is the
same of newpath+('second',). If so, ('same', 'self') will have
value ('same', 'then'), else ('same', 'else').
>>> m = Mesh({
... ('same',): None,
... ('success',): None,
... ('fail',): None,
... ('c',): None,
... ('b',): ('c',),
... ('a',): ('same',),
... ('a', 'first'): ('b',),
... ('a', 'second'): ('c',),
... ('a', 'then'): ('success',),
... ('a', 'else'): ('fail',)
... })
>>> m.clone(('same',), ('a',))
>>> m[('a', 'self')]
('a', 'then')
>>> m.valueof(('a', 'self'))
('success',)
|
nylo/mesh.py
|
clone
|
VeggeroNylo/nylo
| 18
|
python
|
def clone(self, oldroot: Tuple[str], newroot: Tuple[str], done=()):
"\n\t\tThis function clones all the values in the dictionary\n\t\twhere the keys starts with oldroot to the same\n\t\tpath but with oldroot replaced with newroot, also\n\t\tchanging the root in the value if it is a path.\n\t\tThere are a couple of exception: \n\t\t- If the path does not start with oldroot but it *is* \n\t\toldroot itself, it is cloned to newpath only if the\n\t\tvalue is not None.\n\t\t- If the value of a path does not start with oldroot\n\t\tbut it *is* oldroot itself, it is not changed.\n\t\t- If the path ends with ('self',), and the value is\n\t\toldroot itself, the values is changed to newpath.\n\t\t- If the path, after changing the oldroot with the newroot,\n\t\talready exists and is not None, that value is not cloned,\n\t\tand the old one is preserved.\n\t\t\n\t\t>>> m = Mesh({\n\t\t...\t ('fib', 'n'): ('nat',),\n\t\t... ('fib', 'prev'): ('fib', 'n'),\n\t\t... ('fib',): ('fib', 'prev'),\n\t\t... ('fib', 'self'): ('fib',),\n\t\t... ('fib', 'call'): ('fib',),\n\t\t... ('fib', 'none'): None,\n\t\t...\n\t\t... ('tgt',): ('fib',),\n\t\t... ('tgt', 'n'): ('k',)\n\t\t... })\n\t\t>>> m.clone(('fib',), ('tgt',))\n\t\t>>> m[('tgt',)]\n\t\t('fib', 'prev')\n\t\t>>> m[('tgt', 'n')]\n\t\t('k',)\n\t\t>>> m[('tgt', 'prev')]\n\t\t('tgt', 'n')\n\t\t>>> m[('tgt', 'self')]\n\t\t('tgt',)\n\t\t>>> m[('tgt', 'call')]\n\t\t('fib',)\n\t\t>>> m[('tgt', 'none')] #(None\t)\n\t\t\n\t\t>>> m.clone(('fib', 'none'), ('tgt',))\n\t\t>>> m[('tgt',)]\n\t\t('fib', 'prev')\n\t\t\n\t\tA special case is cloning from ('same'). That is the\n\t\tonly built-in function. When cloning from it, this function\n\t\twill check if the Mesh.valueof(newpath+('first',)) is the\n\t\tsame of newpath+('second',). If so, ('same', 'self') will have\n\t\tvalue ('same', 'then'), else ('same', 'else').\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('same',): None,\n\t\t... ('success',): None,\n\t\t... ('fail',): None,\n\t\t... ('c',): None,\n\t\t... ('b',): ('c',),\n\t\t... ('a',): ('same',),\n\t\t... 
('a', 'first'): ('b',),\n\t\t... ('a', 'second'): ('c',),\n\t\t... ('a', 'then'): ('success',),\n\t\t... ('a', 'else'): ('fail',)\n\t\t... })\n\t\t>>> m.clone(('same',), ('a',))\n\t\t>>> m[('a', 'self')]\n\t\t('a', 'then')\n\t\t>>> m.valueof(('a', 'self'))\n\t\t('success',)\n\t\t"
delta = {}
selfpath = (oldroot + ('self',))
if (not (oldroot in self)):
self.valueof(oldroot, done)
blockeds = set()
for (key, value) in sorted(self.items(), key=(lambda x: x[0])):
newkey = chroot(key, oldroot, newroot)
if any(((key[:len(b)] == b) for b in blockeds)):
continue
if (newkey == key):
continue
if (not ((newkey in self) and (self[newkey] is not None))):
newval = (chroot(value, oldroot, newroot) if (not (value is None)) else None)
delta[newkey] = newval
else:
blockeds.add(key)
if ((oldroot in self) and self[oldroot]):
delta[newroot] = self[oldroot]
if (oldroot == ('same',)):
delta[(newroot + ('self',))] = (newroot + (('then',) if (self.valueof((newroot + ('first',)), done) == self.valueof((newroot + ('second',)), done)) else ('else',)))
if ((selfpath in self) and (self[selfpath] == oldroot)):
delta[(newroot + ('self',))] = newroot
self.update(delta)
|
def clone(self, oldroot: Tuple[str], newroot: Tuple[str], done=()):
"\n\t\tThis function clones all the values in the dictionary\n\t\twhere the keys starts with oldroot to the same\n\t\tpath but with oldroot replaced with newroot, also\n\t\tchanging the root in the value if it is a path.\n\t\tThere are a couple of exception: \n\t\t- If the path does not start with oldroot but it *is* \n\t\toldroot itself, it is cloned to newpath only if the\n\t\tvalue is not None.\n\t\t- If the value of a path does not start with oldroot\n\t\tbut it *is* oldroot itself, it is not changed.\n\t\t- If the path ends with ('self',), and the value is\n\t\toldroot itself, the values is changed to newpath.\n\t\t- If the path, after changing the oldroot with the newroot,\n\t\talready exists and is not None, that value is not cloned,\n\t\tand the old one is preserved.\n\t\t\n\t\t>>> m = Mesh({\n\t\t...\t ('fib', 'n'): ('nat',),\n\t\t... ('fib', 'prev'): ('fib', 'n'),\n\t\t... ('fib',): ('fib', 'prev'),\n\t\t... ('fib', 'self'): ('fib',),\n\t\t... ('fib', 'call'): ('fib',),\n\t\t... ('fib', 'none'): None,\n\t\t...\n\t\t... ('tgt',): ('fib',),\n\t\t... ('tgt', 'n'): ('k',)\n\t\t... })\n\t\t>>> m.clone(('fib',), ('tgt',))\n\t\t>>> m[('tgt',)]\n\t\t('fib', 'prev')\n\t\t>>> m[('tgt', 'n')]\n\t\t('k',)\n\t\t>>> m[('tgt', 'prev')]\n\t\t('tgt', 'n')\n\t\t>>> m[('tgt', 'self')]\n\t\t('tgt',)\n\t\t>>> m[('tgt', 'call')]\n\t\t('fib',)\n\t\t>>> m[('tgt', 'none')] #(None\t)\n\t\t\n\t\t>>> m.clone(('fib', 'none'), ('tgt',))\n\t\t>>> m[('tgt',)]\n\t\t('fib', 'prev')\n\t\t\n\t\tA special case is cloning from ('same'). That is the\n\t\tonly built-in function. When cloning from it, this function\n\t\twill check if the Mesh.valueof(newpath+('first',)) is the\n\t\tsame of newpath+('second',). If so, ('same', 'self') will have\n\t\tvalue ('same', 'then'), else ('same', 'else').\n\t\t\n\t\t>>> m = Mesh({\n\t\t... ('same',): None,\n\t\t... ('success',): None,\n\t\t... ('fail',): None,\n\t\t... ('c',): None,\n\t\t... ('b',): ('c',),\n\t\t... ('a',): ('same',),\n\t\t... 
('a', 'first'): ('b',),\n\t\t... ('a', 'second'): ('c',),\n\t\t... ('a', 'then'): ('success',),\n\t\t... ('a', 'else'): ('fail',)\n\t\t... })\n\t\t>>> m.clone(('same',), ('a',))\n\t\t>>> m[('a', 'self')]\n\t\t('a', 'then')\n\t\t>>> m.valueof(('a', 'self'))\n\t\t('success',)\n\t\t"
delta = {}
selfpath = (oldroot + ('self',))
if (not (oldroot in self)):
self.valueof(oldroot, done)
blockeds = set()
for (key, value) in sorted(self.items(), key=(lambda x: x[0])):
newkey = chroot(key, oldroot, newroot)
if any(((key[:len(b)] == b) for b in blockeds)):
continue
if (newkey == key):
continue
if (not ((newkey in self) and (self[newkey] is not None))):
newval = (chroot(value, oldroot, newroot) if (not (value is None)) else None)
delta[newkey] = newval
else:
blockeds.add(key)
if ((oldroot in self) and self[oldroot]):
delta[newroot] = self[oldroot]
if (oldroot == ('same',)):
delta[(newroot + ('self',))] = (newroot + (('then',) if (self.valueof((newroot + ('first',)), done) == self.valueof((newroot + ('second',)), done)) else ('else',)))
if ((selfpath in self) and (self[selfpath] == oldroot)):
delta[(newroot + ('self',))] = newroot
self.update(delta)<|docstring|>This function clones all the values in the dictionary
where the keys starts with oldroot to the same
path but with oldroot replaced with newroot, also
changing the root in the value if it is a path.
There are a couple of exception:
- If the path does not start with oldroot but it *is*
oldroot itself, it is cloned to newpath only if the
value is not None.
- If the value of a path does not start with oldroot
but it *is* oldroot itself, it is not changed.
- If the path ends with ('self',), and the value is
oldroot itself, the values is changed to newpath.
- If the path, after changing the oldroot with the newroot,
already exists and is not None, that value is not cloned,
and the old one is preserved.
>>> m = Mesh({
... ('fib', 'n'): ('nat',),
... ('fib', 'prev'): ('fib', 'n'),
... ('fib',): ('fib', 'prev'),
... ('fib', 'self'): ('fib',),
... ('fib', 'call'): ('fib',),
... ('fib', 'none'): None,
...
... ('tgt',): ('fib',),
... ('tgt', 'n'): ('k',)
... })
>>> m.clone(('fib',), ('tgt',))
>>> m[('tgt',)]
('fib', 'prev')
>>> m[('tgt', 'n')]
('k',)
>>> m[('tgt', 'prev')]
('tgt', 'n')
>>> m[('tgt', 'self')]
('tgt',)
>>> m[('tgt', 'call')]
('fib',)
>>> m[('tgt', 'none')] #(None )
>>> m.clone(('fib', 'none'), ('tgt',))
>>> m[('tgt',)]
('fib', 'prev')
A special case is cloning from ('same'). That is the
only built-in function. When cloning from it, this function
will check if the Mesh.valueof(newpath+('first',)) is the
same of newpath+('second',). If so, ('same', 'self') will have
value ('same', 'then'), else ('same', 'else').
>>> m = Mesh({
... ('same',): None,
... ('success',): None,
... ('fail',): None,
... ('c',): None,
... ('b',): ('c',),
... ('a',): ('same',),
... ('a', 'first'): ('b',),
... ('a', 'second'): ('c',),
... ('a', 'then'): ('success',),
... ('a', 'else'): ('fail',)
... })
>>> m.clone(('same',), ('a',))
>>> m[('a', 'self')]
('a', 'then')
>>> m.valueof(('a', 'self'))
('success',)<|endoftext|>
|
fa0e5a90022e279c4414aa108c77fbe3a8ac79d51b42cc85de199804361ee6f0
|
def __init__(self, x, y=None, data_points=100, add_interaction=True, aggregate_fn='count', width=400, height=400, step_size=None, step_size_type=int, **library_specific_params):
'\n Description:\n \n -------------------------------------------\n Input:\n x\n y\n data_points\n add_interaction \n aggregate_fn\n width\n height\n step_size\n step_size_type\n x_label_map\n y_label_map\n **library_specific_params\n -------------------------------------------\n\n Ouput:\n\n '
self.x = x
self.y = y
self.data_points = data_points
self.add_interaction = add_interaction
self.aggregate_fn = aggregate_fn
self.height = height
self.width = width
self.stride = step_size
self.stride_type = step_size_type
self.library_specific_params = library_specific_params
|
Description:
-------------------------------------------
Input:
x
y
data_points
add_interaction
aggregate_fn
width
height
step_size
step_size_type
x_label_map
y_label_map
**library_specific_params
-------------------------------------------
Ouput:
|
python/cuXfilter/charts/core/aggregate/core_aggregate_bar.py
|
__init__
|
AjayThorve/cuxfilter
| 2
|
python
|
def __init__(self, x, y=None, data_points=100, add_interaction=True, aggregate_fn='count', width=400, height=400, step_size=None, step_size_type=int, **library_specific_params):
'\n Description:\n \n -------------------------------------------\n Input:\n x\n y\n data_points\n add_interaction \n aggregate_fn\n width\n height\n step_size\n step_size_type\n x_label_map\n y_label_map\n **library_specific_params\n -------------------------------------------\n\n Ouput:\n\n '
self.x = x
self.y = y
self.data_points = data_points
self.add_interaction = add_interaction
self.aggregate_fn = aggregate_fn
self.height = height
self.width = width
self.stride = step_size
self.stride_type = step_size_type
self.library_specific_params = library_specific_params
|
def __init__(self, x, y=None, data_points=100, add_interaction=True, aggregate_fn='count', width=400, height=400, step_size=None, step_size_type=int, **library_specific_params):
'\n Description:\n \n -------------------------------------------\n Input:\n x\n y\n data_points\n add_interaction \n aggregate_fn\n width\n height\n step_size\n step_size_type\n x_label_map\n y_label_map\n **library_specific_params\n -------------------------------------------\n\n Ouput:\n\n '
self.x = x
self.y = y
self.data_points = data_points
self.add_interaction = add_interaction
self.aggregate_fn = aggregate_fn
self.height = height
self.width = width
self.stride = step_size
self.stride_type = step_size_type
self.library_specific_params = library_specific_params<|docstring|>Description:
-------------------------------------------
Input:
x
y
data_points
add_interaction
aggregate_fn
width
height
step_size
step_size_type
x_label_map
y_label_map
**library_specific_params
-------------------------------------------
Ouput:<|endoftext|>
|
0c3e31984bc2dafd0d1b75be72519db79d096227e58c1ea5251202059f6d05eb
|
def initiate_chart(self, dashboard_cls):
'\n Description:\n \n -------------------------------------------\n Input:\n data: cudf DataFrame\n -------------------------------------------\n\n Ouput:\n\n '
self.min_value = dashboard_cls._data[self.x].min()
self.max_value = dashboard_cls._data[self.x].max()
if (self.stride is None):
if ((self.max_value < 1) and (self.stride_type == int)):
self.stride_type = float
self.stride = self.stride_type(((self.max_value - self.min_value) / self.data_points))
self.calculate_source(dashboard_cls._data)
self.generate_chart()
self.apply_mappers()
if self.add_interaction:
self.add_range_slider_filter(dashboard_cls)
self.add_events(dashboard_cls)
|
Description:
-------------------------------------------
Input:
data: cudf DataFrame
-------------------------------------------
Ouput:
|
python/cuXfilter/charts/core/aggregate/core_aggregate_bar.py
|
initiate_chart
|
AjayThorve/cuxfilter
| 2
|
python
|
def initiate_chart(self, dashboard_cls):
'\n Description:\n \n -------------------------------------------\n Input:\n data: cudf DataFrame\n -------------------------------------------\n\n Ouput:\n\n '
self.min_value = dashboard_cls._data[self.x].min()
self.max_value = dashboard_cls._data[self.x].max()
if (self.stride is None):
if ((self.max_value < 1) and (self.stride_type == int)):
self.stride_type = float
self.stride = self.stride_type(((self.max_value - self.min_value) / self.data_points))
self.calculate_source(dashboard_cls._data)
self.generate_chart()
self.apply_mappers()
if self.add_interaction:
self.add_range_slider_filter(dashboard_cls)
self.add_events(dashboard_cls)
|
def initiate_chart(self, dashboard_cls):
'\n Description:\n \n -------------------------------------------\n Input:\n data: cudf DataFrame\n -------------------------------------------\n\n Ouput:\n\n '
self.min_value = dashboard_cls._data[self.x].min()
self.max_value = dashboard_cls._data[self.x].max()
if (self.stride is None):
if ((self.max_value < 1) and (self.stride_type == int)):
self.stride_type = float
self.stride = self.stride_type(((self.max_value - self.min_value) / self.data_points))
self.calculate_source(dashboard_cls._data)
self.generate_chart()
self.apply_mappers()
if self.add_interaction:
self.add_range_slider_filter(dashboard_cls)
self.add_events(dashboard_cls)<|docstring|>Description:
-------------------------------------------
Input:
data: cudf DataFrame
-------------------------------------------
Ouput:<|endoftext|>
|
6aa805bfe26dd3302741d8bef25af871798b7815b8a4c33bb29f2be8e225bb0d
|
def calculate_source(self, data, patch_update=False):
'\n Description:\n \n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
if ((self.y == self.x) or (self.y is None)):
df = calc_value_counts(data[self.x].to_gpu_array(), self.data_points)
else:
self.aggregate_fn = 'mean'
df = calc_groupby(self, data)
dict_temp = {'X': list(df[0].astype(df[0].dtype)), 'Y': list(df[1].astype(df[1].dtype))}
self.format_source_data(dict_temp, patch_update)
|
Description:
-------------------------------------------
Input:
-------------------------------------------
Ouput:
|
python/cuXfilter/charts/core/aggregate/core_aggregate_bar.py
|
calculate_source
|
AjayThorve/cuxfilter
| 2
|
python
|
def calculate_source(self, data, patch_update=False):
'\n Description:\n \n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
if ((self.y == self.x) or (self.y is None)):
df = calc_value_counts(data[self.x].to_gpu_array(), self.data_points)
else:
self.aggregate_fn = 'mean'
df = calc_groupby(self, data)
dict_temp = {'X': list(df[0].astype(df[0].dtype)), 'Y': list(df[1].astype(df[1].dtype))}
self.format_source_data(dict_temp, patch_update)
|
def calculate_source(self, data, patch_update=False):
'\n Description:\n \n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
if ((self.y == self.x) or (self.y is None)):
df = calc_value_counts(data[self.x].to_gpu_array(), self.data_points)
else:
self.aggregate_fn = 'mean'
df = calc_groupby(self, data)
dict_temp = {'X': list(df[0].astype(df[0].dtype)), 'Y': list(df[1].astype(df[1].dtype))}
self.format_source_data(dict_temp, patch_update)<|docstring|>Description:
-------------------------------------------
Input:
-------------------------------------------
Ouput:<|endoftext|>
|
6a1026d33b531399e63d35e7a8c247b457479a79c14d7029fef7fb08d3ccd819
|
def add_range_slider_filter(self, dashboard_cls):
'\n Description: add range slider to the bottom of the chart, for the filter function\n to facilitate interaction behavior, that updates the rest of the charts on the page, using datatiles\n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
if (self.stride is None):
self.stride = self.stride_type(((self.max_value - self.min_value) / self.data_points))
self.filter_widget = pn.widgets.RangeSlider(start=self.min_value, end=self.max_value, value=(self.min_value, self.max_value), step=self.stride, **{'width': self.width}, sizing_mode='scale_width')
def filter_widget_callback(event):
if (dashboard_cls._active_view != self.name):
dashboard_cls._reset_current_view(new_active_view=self)
dashboard_cls._calc_data_tiles()
dashboard_cls._query_datatiles_by_range(event.new)
self.filter_widget.param.watch(filter_widget_callback, ['value'], onlychanged=False)
|
Description: add range slider to the bottom of the chart, for the filter function
to facilitate interaction behavior, that updates the rest of the charts on the page, using datatiles
-------------------------------------------
Input:
-------------------------------------------
Ouput:
|
python/cuXfilter/charts/core/aggregate/core_aggregate_bar.py
|
add_range_slider_filter
|
AjayThorve/cuxfilter
| 2
|
python
|
def add_range_slider_filter(self, dashboard_cls):
'\n Description: add range slider to the bottom of the chart, for the filter function\n to facilitate interaction behavior, that updates the rest of the charts on the page, using datatiles\n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
if (self.stride is None):
self.stride = self.stride_type(((self.max_value - self.min_value) / self.data_points))
self.filter_widget = pn.widgets.RangeSlider(start=self.min_value, end=self.max_value, value=(self.min_value, self.max_value), step=self.stride, **{'width': self.width}, sizing_mode='scale_width')
def filter_widget_callback(event):
if (dashboard_cls._active_view != self.name):
dashboard_cls._reset_current_view(new_active_view=self)
dashboard_cls._calc_data_tiles()
dashboard_cls._query_datatiles_by_range(event.new)
self.filter_widget.param.watch(filter_widget_callback, ['value'], onlychanged=False)
|
def add_range_slider_filter(self, dashboard_cls):
'\n Description: add range slider to the bottom of the chart, for the filter function\n to facilitate interaction behavior, that updates the rest of the charts on the page, using datatiles\n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
if (self.stride is None):
self.stride = self.stride_type(((self.max_value - self.min_value) / self.data_points))
self.filter_widget = pn.widgets.RangeSlider(start=self.min_value, end=self.max_value, value=(self.min_value, self.max_value), step=self.stride, **{'width': self.width}, sizing_mode='scale_width')
def filter_widget_callback(event):
if (dashboard_cls._active_view != self.name):
dashboard_cls._reset_current_view(new_active_view=self)
dashboard_cls._calc_data_tiles()
dashboard_cls._query_datatiles_by_range(event.new)
self.filter_widget.param.watch(filter_widget_callback, ['value'], onlychanged=False)<|docstring|>Description: add range slider to the bottom of the chart, for the filter function
to facilitate interaction behavior, that updates the rest of the charts on the page, using datatiles
-------------------------------------------
Input:
-------------------------------------------
Ouput:<|endoftext|>
|
d37bfc459d5b74942903a41d1d3cb54a0c9df17ce83c9dcce62c54e9295b7387
|
def compute_query_dict(self, query_str_dict):
'\n Description: \n\n -------------------------------------------\n Input:\n query_dict = reference to dashboard.__cls__.query_dict\n -------------------------------------------\n\n Ouput:\n '
if (self.filter_widget.value != (self.filter_widget.start, self.filter_widget.end)):
(min_temp, max_temp) = self.filter_widget.value
query_str_dict[self.name] = ((((str(self.stride_type(min_temp)) + '<=') + str(self.x)) + '<=') + str(self.stride_type(max_temp)))
|
Description:
-------------------------------------------
Input:
query_dict = reference to dashboard.__cls__.query_dict
-------------------------------------------
Ouput:
|
python/cuXfilter/charts/core/aggregate/core_aggregate_bar.py
|
compute_query_dict
|
AjayThorve/cuxfilter
| 2
|
python
|
def compute_query_dict(self, query_str_dict):
'\n Description: \n\n -------------------------------------------\n Input:\n query_dict = reference to dashboard.__cls__.query_dict\n -------------------------------------------\n\n Ouput:\n '
if (self.filter_widget.value != (self.filter_widget.start, self.filter_widget.end)):
(min_temp, max_temp) = self.filter_widget.value
query_str_dict[self.name] = ((((str(self.stride_type(min_temp)) + '<=') + str(self.x)) + '<=') + str(self.stride_type(max_temp)))
|
def compute_query_dict(self, query_str_dict):
'\n Description: \n\n -------------------------------------------\n Input:\n query_dict = reference to dashboard.__cls__.query_dict\n -------------------------------------------\n\n Ouput:\n '
if (self.filter_widget.value != (self.filter_widget.start, self.filter_widget.end)):
(min_temp, max_temp) = self.filter_widget.value
query_str_dict[self.name] = ((((str(self.stride_type(min_temp)) + '<=') + str(self.x)) + '<=') + str(self.stride_type(max_temp)))<|docstring|>Description:
-------------------------------------------
Input:
query_dict = reference to dashboard.__cls__.query_dict
-------------------------------------------
Ouput:<|endoftext|>
|
6e3fda399db81529069df5c4e680d2801dfa6cc7cdfb7e2a9015c1a6a12c638e
|
def add_events(self, dashboard_cls):
'\n Description: \n\n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
if (self.reset_event is not None):
self.add_reset_event(dashboard_cls)
|
Description:
-------------------------------------------
Input:
-------------------------------------------
Ouput:
|
python/cuXfilter/charts/core/aggregate/core_aggregate_bar.py
|
add_events
|
AjayThorve/cuxfilter
| 2
|
python
|
def add_events(self, dashboard_cls):
'\n Description: \n\n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
if (self.reset_event is not None):
self.add_reset_event(dashboard_cls)
|
def add_events(self, dashboard_cls):
'\n Description: \n\n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
if (self.reset_event is not None):
self.add_reset_event(dashboard_cls)<|docstring|>Description:
-------------------------------------------
Input:
-------------------------------------------
Ouput:<|endoftext|>
|
be05072aea77c8c8c7d7d5cba763b90a3cb5cfcb51a4286355a468697ddbdcc8
|
def add_reset_event(self, dashboard_cls):
'\n Description: \n\n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
def reset_callback(event):
self.filter_widget.value = (self.filter_widget.start, self.filter_widget.end)
self.add_event(self.reset_event, reset_callback)
|
Description:
-------------------------------------------
Input:
-------------------------------------------
Ouput:
|
python/cuXfilter/charts/core/aggregate/core_aggregate_bar.py
|
add_reset_event
|
AjayThorve/cuxfilter
| 2
|
python
|
def add_reset_event(self, dashboard_cls):
'\n Description: \n\n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
def reset_callback(event):
self.filter_widget.value = (self.filter_widget.start, self.filter_widget.end)
self.add_event(self.reset_event, reset_callback)
|
def add_reset_event(self, dashboard_cls):
'\n Description: \n\n -------------------------------------------\n Input:\n\n -------------------------------------------\n\n Ouput:\n '
def reset_callback(event):
self.filter_widget.value = (self.filter_widget.start, self.filter_widget.end)
self.add_event(self.reset_event, reset_callback)<|docstring|>Description:
-------------------------------------------
Input:
-------------------------------------------
Ouput:<|endoftext|>
|
f70b4ffbb8f7cbbe80d3521293e9b818a3ff814ec5f7531f330441823253e211
|
def create_app(run_mode=os.getenv('FLASK_ENV', 'production')):
'Return a configured Flask App using the Factory method.'
app = Flask(__name__)
app.config.from_object(CONFIGURATION[run_mode])
from auth_api.resources import API_BLUEPRINT, OPS_BLUEPRINT
db.init_app(app)
ma.init_app(app)
app.register_blueprint(API_BLUEPRINT)
app.register_blueprint(OPS_BLUEPRINT)
setup_jwt_manager(app, JWT)
ExceptionHandler(app)
@app.after_request
def add_version(response):
version = get_run_version()
response.headers['API'] = f'auth_api/{version}'
return response
register_shellcontext(app)
return app
|
Return a configured Flask App using the Factory method.
|
auth-api/src/auth_api/__init__.py
|
create_app
|
sumesh-aot/sbc-auth
| 0
|
python
|
def create_app(run_mode=os.getenv('FLASK_ENV', 'production')):
app = Flask(__name__)
app.config.from_object(CONFIGURATION[run_mode])
from auth_api.resources import API_BLUEPRINT, OPS_BLUEPRINT
db.init_app(app)
ma.init_app(app)
app.register_blueprint(API_BLUEPRINT)
app.register_blueprint(OPS_BLUEPRINT)
setup_jwt_manager(app, JWT)
ExceptionHandler(app)
@app.after_request
def add_version(response):
version = get_run_version()
response.headers['API'] = f'auth_api/{version}'
return response
register_shellcontext(app)
return app
|
def create_app(run_mode=os.getenv('FLASK_ENV', 'production')):
app = Flask(__name__)
app.config.from_object(CONFIGURATION[run_mode])
from auth_api.resources import API_BLUEPRINT, OPS_BLUEPRINT
db.init_app(app)
ma.init_app(app)
app.register_blueprint(API_BLUEPRINT)
app.register_blueprint(OPS_BLUEPRINT)
setup_jwt_manager(app, JWT)
ExceptionHandler(app)
@app.after_request
def add_version(response):
version = get_run_version()
response.headers['API'] = f'auth_api/{version}'
return response
register_shellcontext(app)
return app<|docstring|>Return a configured Flask App using the Factory method.<|endoftext|>
|
7189217c1dc834135b5f2bc2f75e454023c2bb4e64dcf51587110e4a211fb589
|
def setup_jwt_manager(app, jwt_manager):
'Use flask app to configure the JWTManager to work for a particular Realm.'
def get_roles(a_dict):
return a_dict['realm_access']['roles']
app.config['JWT_ROLE_CALLBACK'] = get_roles
jwt_manager.init_app(app)
|
Use flask app to configure the JWTManager to work for a particular Realm.
|
auth-api/src/auth_api/__init__.py
|
setup_jwt_manager
|
sumesh-aot/sbc-auth
| 0
|
python
|
def setup_jwt_manager(app, jwt_manager):
def get_roles(a_dict):
return a_dict['realm_access']['roles']
app.config['JWT_ROLE_CALLBACK'] = get_roles
jwt_manager.init_app(app)
|
def setup_jwt_manager(app, jwt_manager):
def get_roles(a_dict):
return a_dict['realm_access']['roles']
app.config['JWT_ROLE_CALLBACK'] = get_roles
jwt_manager.init_app(app)<|docstring|>Use flask app to configure the JWTManager to work for a particular Realm.<|endoftext|>
|
fde0da1eea3567035dffc20a9c88dfe6bb282416066e7a616ea31fee3b7d456a
|
def register_shellcontext(app):
'Register shell context objects.'
def shell_context():
'Shell context objects.'
return {'app': app, 'jwt': JWT, 'db': db, 'models': models}
app.shell_context_processor(shell_context)
|
Register shell context objects.
|
auth-api/src/auth_api/__init__.py
|
register_shellcontext
|
sumesh-aot/sbc-auth
| 0
|
python
|
def register_shellcontext(app):
def shell_context():
'Shell context objects.'
return {'app': app, 'jwt': JWT, 'db': db, 'models': models}
app.shell_context_processor(shell_context)
|
def register_shellcontext(app):
def shell_context():
'Shell context objects.'
return {'app': app, 'jwt': JWT, 'db': db, 'models': models}
app.shell_context_processor(shell_context)<|docstring|>Register shell context objects.<|endoftext|>
|
a397d7390ead43785a2de3040df4847c3b37189bdcf4436aa7aefc9f7d041c10
|
def shell_context():
'Shell context objects.'
return {'app': app, 'jwt': JWT, 'db': db, 'models': models}
|
Shell context objects.
|
auth-api/src/auth_api/__init__.py
|
shell_context
|
sumesh-aot/sbc-auth
| 0
|
python
|
def shell_context():
return {'app': app, 'jwt': JWT, 'db': db, 'models': models}
|
def shell_context():
return {'app': app, 'jwt': JWT, 'db': db, 'models': models}<|docstring|>Shell context objects.<|endoftext|>
|
1606529b3f84a9a70d76a2e6a73d110267c28e8cb2e167cebfdb833e63991fa6
|
def tk_loop(root, ex):
'\n Checks for messages every half a second\n '
if (ex.msg_list is not None):
ex.updateConversation()
root.after(2000, tk_loop, root, ex)
|
Checks for messages every half a second
|
GUI_app.py
|
tk_loop
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def tk_loop(root, ex):
'\n \n '
if (ex.msg_list is not None):
ex.updateConversation()
root.after(2000, tk_loop, root, ex)
|
def tk_loop(root, ex):
'\n \n '
if (ex.msg_list is not None):
ex.updateConversation()
root.after(2000, tk_loop, root, ex)<|docstring|>Checks for messages every half a second<|endoftext|>
|
1dc3d15826229c7fb24aa86c450090680e6a5095c5e984a4f4492ed2f44bcc8a
|
def initiate_tk_loop(root, ex):
"\n I honestly don't know how to thread this other than doing this terrible piece of code\n "
root.after(2000, tk_loop, root, ex)
|
I honestly don't know how to thread this other than doing this terrible piece of code
|
GUI_app.py
|
initiate_tk_loop
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def initiate_tk_loop(root, ex):
"\n \n "
root.after(2000, tk_loop, root, ex)
|
def initiate_tk_loop(root, ex):
"\n \n "
root.after(2000, tk_loop, root, ex)<|docstring|>I honestly don't know how to thread this other than doing this terrible piece of code<|endoftext|>
|
b012ac25b76c0c28b0b14db48dd1e45e97d86eb5cb9048699d4731c26e44af61
|
def removeEmoji(msg):
'\n removes non ASCII chars\n :param msg:\n :return: new_msg with emjoy char removed\n '
new_msg = ''
for ch in msg:
pass
return new_msg
|
removes non ASCII chars
:param msg:
:return: new_msg with emjoy char removed
|
GUI_app.py
|
removeEmoji
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def removeEmoji(msg):
'\n removes non ASCII chars\n :param msg:\n :return: new_msg with emjoy char removed\n '
new_msg =
for ch in msg:
pass
return new_msg
|
def removeEmoji(msg):
'\n removes non ASCII chars\n :param msg:\n :return: new_msg with emjoy char removed\n '
new_msg =
for ch in msg:
pass
return new_msg<|docstring|>removes non ASCII chars
:param msg:
:return: new_msg with emjoy char removed<|endoftext|>
|
ef7aad36509316eee7f6a04f7a1f8ef2a5639013bad8e7a5c1300a6c4baa7d3e
|
def __init__(self, email, password, user_agent=None, max_tries=5, session_cookies=None, logging_level=logging.INFO):
'\n Initializes and logs in the client\n\n :param email: Facebook `email`, `id` or `phone number`\n :param password: Facebook account password\n :param user_agent: Custom user agent to use when sending requests. If `None`, user agent will be chosen from a premade list (see :any:`utils.USER_AGENTS`)\n :param max_tries: Maximum number of times to try logging in\n :param session_cookies: Cookies from a previous session (Will default to login if these are invalid)\n :param logging_level: Configures the `logging level <https://docs.python.org/3/library/logging.html#logging-levels>`_. Defaults to `INFO`\n :type max_tries: int\n :type session_cookies: dict\n :type logging_level: int\n :raises: FBchatException on failed login\n '
(self.sticky, self.pool) = (None, None)
self._session = requests.session()
self.req_counter = 1
self.seq = '0'
self.payloadDefault = {}
self.client = 'mercury'
self.default_thread_id = None
self.default_thread_type = None
self.req_url = ReqUrl()
self.most_recent_message = None
self.most_recent_messages_queue = Queue()
if (not user_agent):
user_agent = choice(USER_AGENTS)
self._header = {'Content-Type': 'application/x-www-form-urlencoded', 'Referer': self.req_url.BASE, 'Origin': self.req_url.BASE, 'User-Agent': user_agent, 'Connection': 'keep-alive'}
handler.setLevel(logging_level)
if ((not session_cookies) or (not self.setSession(session_cookies)) or (not self.isLoggedIn())):
self.login(email, password, max_tries)
else:
self.email = email
self.password = password
|
Initializes and logs in the client
:param email: Facebook `email`, `id` or `phone number`
:param password: Facebook account password
:param user_agent: Custom user agent to use when sending requests. If `None`, user agent will be chosen from a premade list (see :any:`utils.USER_AGENTS`)
:param max_tries: Maximum number of times to try logging in
:param session_cookies: Cookies from a previous session (Will default to login if these are invalid)
:param logging_level: Configures the `logging level <https://docs.python.org/3/library/logging.html#logging-levels>`_. Defaults to `INFO`
:type max_tries: int
:type session_cookies: dict
:type logging_level: int
:raises: FBchatException on failed login
|
GUI_app.py
|
__init__
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def __init__(self, email, password, user_agent=None, max_tries=5, session_cookies=None, logging_level=logging.INFO):
'\n Initializes and logs in the client\n\n :param email: Facebook `email`, `id` or `phone number`\n :param password: Facebook account password\n :param user_agent: Custom user agent to use when sending requests. If `None`, user agent will be chosen from a premade list (see :any:`utils.USER_AGENTS`)\n :param max_tries: Maximum number of times to try logging in\n :param session_cookies: Cookies from a previous session (Will default to login if these are invalid)\n :param logging_level: Configures the `logging level <https://docs.python.org/3/library/logging.html#logging-levels>`_. Defaults to `INFO`\n :type max_tries: int\n :type session_cookies: dict\n :type logging_level: int\n :raises: FBchatException on failed login\n '
(self.sticky, self.pool) = (None, None)
self._session = requests.session()
self.req_counter = 1
self.seq = '0'
self.payloadDefault = {}
self.client = 'mercury'
self.default_thread_id = None
self.default_thread_type = None
self.req_url = ReqUrl()
self.most_recent_message = None
self.most_recent_messages_queue = Queue()
if (not user_agent):
user_agent = choice(USER_AGENTS)
self._header = {'Content-Type': 'application/x-www-form-urlencoded', 'Referer': self.req_url.BASE, 'Origin': self.req_url.BASE, 'User-Agent': user_agent, 'Connection': 'keep-alive'}
handler.setLevel(logging_level)
if ((not session_cookies) or (not self.setSession(session_cookies)) or (not self.isLoggedIn())):
self.login(email, password, max_tries)
else:
self.email = email
self.password = password
|
def __init__(self, email, password, user_agent=None, max_tries=5, session_cookies=None, logging_level=logging.INFO):
'\n Initializes and logs in the client\n\n :param email: Facebook `email`, `id` or `phone number`\n :param password: Facebook account password\n :param user_agent: Custom user agent to use when sending requests. If `None`, user agent will be chosen from a premade list (see :any:`utils.USER_AGENTS`)\n :param max_tries: Maximum number of times to try logging in\n :param session_cookies: Cookies from a previous session (Will default to login if these are invalid)\n :param logging_level: Configures the `logging level <https://docs.python.org/3/library/logging.html#logging-levels>`_. Defaults to `INFO`\n :type max_tries: int\n :type session_cookies: dict\n :type logging_level: int\n :raises: FBchatException on failed login\n '
(self.sticky, self.pool) = (None, None)
self._session = requests.session()
self.req_counter = 1
self.seq = '0'
self.payloadDefault = {}
self.client = 'mercury'
self.default_thread_id = None
self.default_thread_type = None
self.req_url = ReqUrl()
self.most_recent_message = None
self.most_recent_messages_queue = Queue()
if (not user_agent):
user_agent = choice(USER_AGENTS)
self._header = {'Content-Type': 'application/x-www-form-urlencoded', 'Referer': self.req_url.BASE, 'Origin': self.req_url.BASE, 'User-Agent': user_agent, 'Connection': 'keep-alive'}
handler.setLevel(logging_level)
if ((not session_cookies) or (not self.setSession(session_cookies)) or (not self.isLoggedIn())):
self.login(email, password, max_tries)
else:
self.email = email
self.password = password<|docstring|>Initializes and logs in the client
:param email: Facebook `email`, `id` or `phone number`
:param password: Facebook account password
:param user_agent: Custom user agent to use when sending requests. If `None`, user agent will be chosen from a premade list (see :any:`utils.USER_AGENTS`)
:param max_tries: Maximum number of times to try logging in
:param session_cookies: Cookies from a previous session (Will default to login if these are invalid)
:param logging_level: Configures the `logging level <https://docs.python.org/3/library/logging.html#logging-levels>`_. Defaults to `INFO`
:type max_tries: int
:type session_cookies: dict
:type logging_level: int
:raises: FBchatException on failed login<|endoftext|>
|
7a697962c8cb75a06f40b53662256f419d444144cab75f714dc7d31d5735125d
|
def stopListening(self):
'Cleans up the variables from startListening'
print('Logging off...')
self.listening = False
(self.sticky, self.pool) = (None, None)
|
Cleans up the variables from startListening
|
GUI_app.py
|
stopListening
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def stopListening(self):
print('Logging off...')
self.listening = False
(self.sticky, self.pool) = (None, None)
|
def stopListening(self):
print('Logging off...')
self.listening = False
(self.sticky, self.pool) = (None, None)<|docstring|>Cleans up the variables from startListening<|endoftext|>
|
f203beaeb8071ea280a31fb7f57575310b115e73add094defac9d44abb4edf83
|
def listen(self, markAlive=True):
'\n Initializes and runs the listening loop continually\n\n :param markAlive: Whether this should ping the Facebook server each time the loop runs\n :type markAlive: bool\n '
self.startListening()
self.onListening()
while (self.listening and self.doOneListen(markAlive)):
pass
self.stopListening()
|
Initializes and runs the listening loop continually
:param markAlive: Whether this should ping the Facebook server each time the loop runs
:type markAlive: bool
|
GUI_app.py
|
listen
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def listen(self, markAlive=True):
'\n Initializes and runs the listening loop continually\n\n :param markAlive: Whether this should ping the Facebook server each time the loop runs\n :type markAlive: bool\n '
self.startListening()
self.onListening()
while (self.listening and self.doOneListen(markAlive)):
pass
self.stopListening()
|
def listen(self, markAlive=True):
'\n Initializes and runs the listening loop continually\n\n :param markAlive: Whether this should ping the Facebook server each time the loop runs\n :type markAlive: bool\n '
self.startListening()
self.onListening()
while (self.listening and self.doOneListen(markAlive)):
pass
self.stopListening()<|docstring|>Initializes and runs the listening loop continually
:param markAlive: Whether this should ping the Facebook server each time the loop runs
:type markAlive: bool<|endoftext|>
|
03f81f9b31ce30e757a7c9f0c2f58a63fb3cf956221f148d406adf5d862de65b
|
def centerWindow(self, notself=None):
'\n This centers the window into place\n if notself is set, then it centers\n the notself window\n\n @param:\n notself - TKobject\n '
if (notself is not None):
sw = self.parent.winfo_screenwidth()
sh = self.parent.winfo_screenheight()
x = ((sw - (self.w / 2)) / 2)
y = ((sh - (self.h / 2)) / 2)
notself.geometry(('%dx%d+%d+%d' % ((self.w / 1.8), (self.h / 1.8), x, y)))
else:
sw = self.parent.winfo_screenwidth()
sh = self.parent.winfo_screenheight()
x = ((sw - self.w) / 2)
y = ((sh - self.h) / 2)
self.parent.geometry(('%dx%d+%d+%d' % (self.w, self.h, x, y)))
|
This centers the window into place
if notself is set, then it centers
the notself window
@param:
notself - TKobject
|
GUI_app.py
|
centerWindow
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def centerWindow(self, notself=None):
'\n This centers the window into place\n if notself is set, then it centers\n the notself window\n\n @param:\n notself - TKobject\n '
if (notself is not None):
sw = self.parent.winfo_screenwidth()
sh = self.parent.winfo_screenheight()
x = ((sw - (self.w / 2)) / 2)
y = ((sh - (self.h / 2)) / 2)
notself.geometry(('%dx%d+%d+%d' % ((self.w / 1.8), (self.h / 1.8), x, y)))
else:
sw = self.parent.winfo_screenwidth()
sh = self.parent.winfo_screenheight()
x = ((sw - self.w) / 2)
y = ((sh - self.h) / 2)
self.parent.geometry(('%dx%d+%d+%d' % (self.w, self.h, x, y)))
|
def centerWindow(self, notself=None):
'\n This centers the window into place\n if notself is set, then it centers\n the notself window\n\n @param:\n notself - TKobject\n '
if (notself is not None):
sw = self.parent.winfo_screenwidth()
sh = self.parent.winfo_screenheight()
x = ((sw - (self.w / 2)) / 2)
y = ((sh - (self.h / 2)) / 2)
notself.geometry(('%dx%d+%d+%d' % ((self.w / 1.8), (self.h / 1.8), x, y)))
else:
sw = self.parent.winfo_screenwidth()
sh = self.parent.winfo_screenheight()
x = ((sw - self.w) / 2)
y = ((sh - self.h) / 2)
self.parent.geometry(('%dx%d+%d+%d' % (self.w, self.h, x, y)))<|docstring|>This centers the window into place
if notself is set, then it centers
the notself window
@param:
notself - TKobject<|endoftext|>
|
52125c71e4a60bbb637b0d712e1f2c3c26f10c28e81259bcbd7e23edeffb1f54
|
def startWindow(self):
'\n This method starts/creates the window for\n the UI\n '
Frame.__init__(self, self.parent, background='white')
self.style = Style()
self.style.theme_use('default')
self.pack(fill=BOTH, expand=1)
if (not self.initialized):
self.centerWindow()
else:
self.parent.geometry(('%dx%d' % (self.w, self.h)))
self.initialized = True
|
This method starts/creates the window for
the UI
|
GUI_app.py
|
startWindow
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def startWindow(self):
'\n This method starts/creates the window for\n the UI\n '
Frame.__init__(self, self.parent, background='white')
self.style = Style()
self.style.theme_use('default')
self.pack(fill=BOTH, expand=1)
if (not self.initialized):
self.centerWindow()
else:
self.parent.geometry(('%dx%d' % (self.w, self.h)))
self.initialized = True
|
def startWindow(self):
'\n This method starts/creates the window for\n the UI\n '
Frame.__init__(self, self.parent, background='white')
self.style = Style()
self.style.theme_use('default')
self.pack(fill=BOTH, expand=1)
if (not self.initialized):
self.centerWindow()
else:
self.parent.geometry(('%dx%d' % (self.w, self.h)))
self.initialized = True<|docstring|>This method starts/creates the window for
the UI<|endoftext|>
|
052bb32e467da830c1c3fe4bfadd2526c04c76bd53a64381491e5db01c81d3f5
|
def resetWindow(self):
'\n Resets the window\n '
if self.initialized:
self.destroy()
if (self.loadWindow is not None):
self.loadWindow.destroy()
self.startWindow()
|
Resets the window
|
GUI_app.py
|
resetWindow
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def resetWindow(self):
'\n \n '
if self.initialized:
self.destroy()
if (self.loadWindow is not None):
self.loadWindow.destroy()
self.startWindow()
|
def resetWindow(self):
'\n \n '
if self.initialized:
self.destroy()
if (self.loadWindow is not None):
self.loadWindow.destroy()
self.startWindow()<|docstring|>Resets the window<|endoftext|>
|
0d55460f2e3b1d81366e2dfc99d5a5b0505c0a3ca3dbb3702b890a55b36f50cc
|
def loginScreen(self):
'\n First screen that user will see, will require Facebook credentials to be inputted\n '
self.h = 150
self.w = 350
self.resetWindow()
self.parent.title('Welcome')
emailFrame = Frame(self)
emailFrame.pack(fill=X, side=TOP)
emailLabel = Label(emailFrame, text='Email:', background='white')
emailLabel.pack(side=LEFT, padx=15, pady=10)
self.emailEntry = Entry(emailFrame, width=30)
self.emailEntry.insert(0, self.email)
self.emailEntry.pack(side=LEFT, padx=35, pady=10)
passwordFrame = Frame(self)
passwordFrame.pack(fill=X, side=TOP)
passwordLabel = Label(passwordFrame, text='Password:', background='white')
passwordLabel.pack(side=LEFT, padx=15, pady=10)
self.passwordEntry = Entry(passwordFrame, show='*', width=30)
self.passwordEntry.bind('<Return>', self.start)
self.passwordEntry.insert(0, self.password)
self.passwordEntry.pack(side=LEFT, padx=35, pady=10)
frame = Frame(self, borderwidth=1)
frame.pack(fill=BOTH, expand=True)
self.pack(fill=BOTH, expand=True)
exitButton = Button(self, text='Exit', command=self.parent.destroy)
exitButton.pack(side=RIGHT, padx=5, pady=5)
self.loginButton = Button(self, text='Log In', command=self.start)
self.loginButton.pack(side=RIGHT)
|
First screen that user will see, will require Facebook credentials to be inputted
|
GUI_app.py
|
loginScreen
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def loginScreen(self):
'\n \n '
self.h = 150
self.w = 350
self.resetWindow()
self.parent.title('Welcome')
emailFrame = Frame(self)
emailFrame.pack(fill=X, side=TOP)
emailLabel = Label(emailFrame, text='Email:', background='white')
emailLabel.pack(side=LEFT, padx=15, pady=10)
self.emailEntry = Entry(emailFrame, width=30)
self.emailEntry.insert(0, self.email)
self.emailEntry.pack(side=LEFT, padx=35, pady=10)
passwordFrame = Frame(self)
passwordFrame.pack(fill=X, side=TOP)
passwordLabel = Label(passwordFrame, text='Password:', background='white')
passwordLabel.pack(side=LEFT, padx=15, pady=10)
self.passwordEntry = Entry(passwordFrame, show='*', width=30)
self.passwordEntry.bind('<Return>', self.start)
self.passwordEntry.insert(0, self.password)
self.passwordEntry.pack(side=LEFT, padx=35, pady=10)
frame = Frame(self, borderwidth=1)
frame.pack(fill=BOTH, expand=True)
self.pack(fill=BOTH, expand=True)
exitButton = Button(self, text='Exit', command=self.parent.destroy)
exitButton.pack(side=RIGHT, padx=5, pady=5)
self.loginButton = Button(self, text='Log In', command=self.start)
self.loginButton.pack(side=RIGHT)
|
def loginScreen(self):
'\n \n '
self.h = 150
self.w = 350
self.resetWindow()
self.parent.title('Welcome')
emailFrame = Frame(self)
emailFrame.pack(fill=X, side=TOP)
emailLabel = Label(emailFrame, text='Email:', background='white')
emailLabel.pack(side=LEFT, padx=15, pady=10)
self.emailEntry = Entry(emailFrame, width=30)
self.emailEntry.insert(0, self.email)
self.emailEntry.pack(side=LEFT, padx=35, pady=10)
passwordFrame = Frame(self)
passwordFrame.pack(fill=X, side=TOP)
passwordLabel = Label(passwordFrame, text='Password:', background='white')
passwordLabel.pack(side=LEFT, padx=15, pady=10)
self.passwordEntry = Entry(passwordFrame, show='*', width=30)
self.passwordEntry.bind('<Return>', self.start)
self.passwordEntry.insert(0, self.password)
self.passwordEntry.pack(side=LEFT, padx=35, pady=10)
frame = Frame(self, borderwidth=1)
frame.pack(fill=BOTH, expand=True)
self.pack(fill=BOTH, expand=True)
exitButton = Button(self, text='Exit', command=self.parent.destroy)
exitButton.pack(side=RIGHT, padx=5, pady=5)
self.loginButton = Button(self, text='Log In', command=self.start)
self.loginButton.pack(side=RIGHT)<|docstring|>First screen that user will see, will require Facebook credentials to be inputted<|endoftext|>
|
74f8af4fb7a3952d17248fd965e52fe3ac769367e8a15130b9da85bfca69ce60
|
def start(self, opt=''):
'\n Initiates login, starts loading screen.\n '
thread1 = ThreadedTask(self.queue, self.login)
thread2 = ThreadedTask(self.queue, self.loadingScreen)
thread2.start()
thread1.start()
self.checkThread(thread1, self.chatUI)
|
Initiates login, starts loading screen.
|
GUI_app.py
|
start
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def start(self, opt=):
'\n \n '
thread1 = ThreadedTask(self.queue, self.login)
thread2 = ThreadedTask(self.queue, self.loadingScreen)
thread2.start()
thread1.start()
self.checkThread(thread1, self.chatUI)
|
def start(self, opt=):
'\n \n '
thread1 = ThreadedTask(self.queue, self.login)
thread2 = ThreadedTask(self.queue, self.loadingScreen)
thread2.start()
thread1.start()
self.checkThread(thread1, self.chatUI)<|docstring|>Initiates login, starts loading screen.<|endoftext|>
|
3ad2bd85c3f200fb8b9b53ef1a91110a2a2f017c187fba966c02ed6e9665c8ba
|
def loadingScreen(self):
'\n This starts the loading screen\n and disables all buttons\n '
for i in self.winfo_children():
if (Button == type(i)):
i.configure(state=DISABLED)
self.loadWindow = Toplevel(self.parent)
loadingstring = 'Logging in...'
loadinglabel = Label(self.loadWindow, text=loadingstring, background='white')
progressbar = Progressbar(self.loadWindow, orient='horizontal', length=300, mode='indeterminate')
progressbar.pack(pady=(self.h / 10))
loadinglabel.pack()
self.centerWindow(self.loadWindow)
self.loadWindow.title('Wait')
progressbar.start()
|
This starts the loading screen
and disables all buttons
|
GUI_app.py
|
loadingScreen
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def loadingScreen(self):
'\n This starts the loading screen\n and disables all buttons\n '
for i in self.winfo_children():
if (Button == type(i)):
i.configure(state=DISABLED)
self.loadWindow = Toplevel(self.parent)
loadingstring = 'Logging in...'
loadinglabel = Label(self.loadWindow, text=loadingstring, background='white')
progressbar = Progressbar(self.loadWindow, orient='horizontal', length=300, mode='indeterminate')
progressbar.pack(pady=(self.h / 10))
loadinglabel.pack()
self.centerWindow(self.loadWindow)
self.loadWindow.title('Wait')
progressbar.start()
|
def loadingScreen(self):
'\n This starts the loading screen\n and disables all buttons\n '
for i in self.winfo_children():
if (Button == type(i)):
i.configure(state=DISABLED)
self.loadWindow = Toplevel(self.parent)
loadingstring = 'Logging in...'
loadinglabel = Label(self.loadWindow, text=loadingstring, background='white')
progressbar = Progressbar(self.loadWindow, orient='horizontal', length=300, mode='indeterminate')
progressbar.pack(pady=(self.h / 10))
loadinglabel.pack()
self.centerWindow(self.loadWindow)
self.loadWindow.title('Wait')
progressbar.start()<|docstring|>This starts the loading screen
and disables all buttons<|endoftext|>
|
74495032ad2704d71c5b2c8cfcb6f1e3097b0ae770405f8ab6adeeea8c9a011b
|
def login(self):
'\n Login with the inputted credentials from the loginScreen\n '
if (self.client is not None):
if self.client.isLoggedIn():
self.client.logout()
self.email = self.emailEntry.get()
self.password = self.passwordEntry.get()
self.client = GuiClient(self.email, self.password)
print(self.client._fetchInfo(self.client.uid)[self.client.uid].get('first_name'))
self.thread3 = ThreadedTask(self.queue, self.listen)
self.thread3.start()
|
Login with the inputted credentials from the loginScreen
|
GUI_app.py
|
login
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def login(self):
'\n \n '
if (self.client is not None):
if self.client.isLoggedIn():
self.client.logout()
self.email = self.emailEntry.get()
self.password = self.passwordEntry.get()
self.client = GuiClient(self.email, self.password)
print(self.client._fetchInfo(self.client.uid)[self.client.uid].get('first_name'))
self.thread3 = ThreadedTask(self.queue, self.listen)
self.thread3.start()
|
def login(self):
'\n \n '
if (self.client is not None):
if self.client.isLoggedIn():
self.client.logout()
self.email = self.emailEntry.get()
self.password = self.passwordEntry.get()
self.client = GuiClient(self.email, self.password)
print(self.client._fetchInfo(self.client.uid)[self.client.uid].get('first_name'))
self.thread3 = ThreadedTask(self.queue, self.listen)
self.thread3.start()<|docstring|>Login with the inputted credentials from the loginScreen<|endoftext|>
|
3b7893c3435c0cdee4395b94fbb66319e43edc19db5f1fb8c3f1b6d951e4b264
|
def listen(self):
'\n We start the listening loop\n '
self.client.listen()
|
We start the listening loop
|
GUI_app.py
|
listen
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def listen(self):
'\n \n '
self.client.listen()
|
def listen(self):
'\n \n '
self.client.listen()<|docstring|>We start the listening loop<|endoftext|>
|
de42df0e4b7c88490b8477a65ae681a8bbff152e3c4caee37ef004c7bbb2cd5b
|
def chatUI(self):
'\n Chat GUI page\n '
self.h = 350
self.w = 700
self.resetWindow()
self.parent.title('Messenger')
self.right_frame = Frame(self)
self.right_frame.pack(side=RIGHT, fill='y')
self.messages_frame = Frame(self.right_frame)
self.messages_frame.pack(side=TOP)
self.my_msg = StringVar()
self.my_msg.set('')
self.msg_scrollbar = Scrollbar(self.messages_frame)
self.msg_list = Listbox(self.messages_frame, height=15, width=50, yscrollcommand=self.msg_scrollbar.set)
self.msg_scrollbar.config(command=self.msg_list.yview)
self.msg_scrollbar.pack(side=RIGHT, fill='y', padx=5)
self.msg_list.pack(side=RIGHT)
self.entry_field = Entry(self.right_frame, textvariable=self.my_msg)
self.entry_field.bind('<Return>', self.send)
self.send_button = Button(self.right_frame, text='Send', command=self.send)
self.entry_field.pack(side='top', fill=X, padx=5, pady=5)
self.send_button.pack(side='top')
self.exitButton = Button(self.right_frame, text='Exit', command=self.exit)
self.exitButton.pack(side='bottom', padx=5, pady=5)
self.left_frame = Frame(self)
self.left_frame.pack(side=LEFT, fill='y')
self.usr_scrollbar = Scrollbar(self.left_frame)
self.usr_list = Listbox(self.left_frame, height=15, width=50, yscrollcommand=self.usr_scrollbar.set)
self.usr_scrollbar.config(command=self.usr_list.yview)
self.usr_search_bar = Entry(self.left_frame, textvariable='')
self.usr_search_button = Button(self.left_frame, text='Search', command=self.search)
self.usr_search_bar.pack(side='top', fill=X, pady=2, padx=1)
self.usr_search_button.pack(side='top', fill=X, pady=2, padx=1)
self.usr_scrollbar.pack(side=RIGHT, fill='y', padx=5)
self.usr_list.pack(side=RIGHT, fill='y')
self.search()
self.usr_list.bind('<Double-1>', self.changeConvo)
|
Chat GUI page
|
GUI_app.py
|
chatUI
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def chatUI(self):
'\n \n '
self.h = 350
self.w = 700
self.resetWindow()
self.parent.title('Messenger')
self.right_frame = Frame(self)
self.right_frame.pack(side=RIGHT, fill='y')
self.messages_frame = Frame(self.right_frame)
self.messages_frame.pack(side=TOP)
self.my_msg = StringVar()
self.my_msg.set()
self.msg_scrollbar = Scrollbar(self.messages_frame)
self.msg_list = Listbox(self.messages_frame, height=15, width=50, yscrollcommand=self.msg_scrollbar.set)
self.msg_scrollbar.config(command=self.msg_list.yview)
self.msg_scrollbar.pack(side=RIGHT, fill='y', padx=5)
self.msg_list.pack(side=RIGHT)
self.entry_field = Entry(self.right_frame, textvariable=self.my_msg)
self.entry_field.bind('<Return>', self.send)
self.send_button = Button(self.right_frame, text='Send', command=self.send)
self.entry_field.pack(side='top', fill=X, padx=5, pady=5)
self.send_button.pack(side='top')
self.exitButton = Button(self.right_frame, text='Exit', command=self.exit)
self.exitButton.pack(side='bottom', padx=5, pady=5)
self.left_frame = Frame(self)
self.left_frame.pack(side=LEFT, fill='y')
self.usr_scrollbar = Scrollbar(self.left_frame)
self.usr_list = Listbox(self.left_frame, height=15, width=50, yscrollcommand=self.usr_scrollbar.set)
self.usr_scrollbar.config(command=self.usr_list.yview)
self.usr_search_bar = Entry(self.left_frame, textvariable=)
self.usr_search_button = Button(self.left_frame, text='Search', command=self.search)
self.usr_search_bar.pack(side='top', fill=X, pady=2, padx=1)
self.usr_search_button.pack(side='top', fill=X, pady=2, padx=1)
self.usr_scrollbar.pack(side=RIGHT, fill='y', padx=5)
self.usr_list.pack(side=RIGHT, fill='y')
self.search()
self.usr_list.bind('<Double-1>', self.changeConvo)
|
def chatUI(self):
'\n \n '
self.h = 350
self.w = 700
self.resetWindow()
self.parent.title('Messenger')
self.right_frame = Frame(self)
self.right_frame.pack(side=RIGHT, fill='y')
self.messages_frame = Frame(self.right_frame)
self.messages_frame.pack(side=TOP)
self.my_msg = StringVar()
self.my_msg.set()
self.msg_scrollbar = Scrollbar(self.messages_frame)
self.msg_list = Listbox(self.messages_frame, height=15, width=50, yscrollcommand=self.msg_scrollbar.set)
self.msg_scrollbar.config(command=self.msg_list.yview)
self.msg_scrollbar.pack(side=RIGHT, fill='y', padx=5)
self.msg_list.pack(side=RIGHT)
self.entry_field = Entry(self.right_frame, textvariable=self.my_msg)
self.entry_field.bind('<Return>', self.send)
self.send_button = Button(self.right_frame, text='Send', command=self.send)
self.entry_field.pack(side='top', fill=X, padx=5, pady=5)
self.send_button.pack(side='top')
self.exitButton = Button(self.right_frame, text='Exit', command=self.exit)
self.exitButton.pack(side='bottom', padx=5, pady=5)
self.left_frame = Frame(self)
self.left_frame.pack(side=LEFT, fill='y')
self.usr_scrollbar = Scrollbar(self.left_frame)
self.usr_list = Listbox(self.left_frame, height=15, width=50, yscrollcommand=self.usr_scrollbar.set)
self.usr_scrollbar.config(command=self.usr_list.yview)
self.usr_search_bar = Entry(self.left_frame, textvariable=)
self.usr_search_button = Button(self.left_frame, text='Search', command=self.search)
self.usr_search_bar.pack(side='top', fill=X, pady=2, padx=1)
self.usr_search_button.pack(side='top', fill=X, pady=2, padx=1)
self.usr_scrollbar.pack(side=RIGHT, fill='y', padx=5)
self.usr_list.pack(side=RIGHT, fill='y')
self.search()
self.usr_list.bind('<Double-1>', self.changeConvo)<|docstring|>Chat GUI page<|endoftext|>
|
f1f8953c8e380380c5421aa14ab5077a59bfcdbe8759cf9858fce36177dc8d92
|
def send(self, _=''):
'\n Send messages, will send whatever is in the message field and then clear it\n '
plaintext = self.entry_field.get()
key = randint((- 60), 60)
ciphertext = Encrypt.encrypt(plaintext, key)
ciphertext = '{}Q_Q{}'.format(key, ciphertext)
message = Message(text=unicode(ciphertext, 'ascii'))
self.client.send(message, self.currentUser.uid)
self.entry_field.delete(0, END)
self.client.most_recent_message = message
self.msg_list.insert(0, ((self.name + ': ') + plaintext))
self.msg_list.see(END)
|
Send messages, will send whatever is in the message field and then clear it
|
GUI_app.py
|
send
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def send(self, _=):
'\n \n '
plaintext = self.entry_field.get()
key = randint((- 60), 60)
ciphertext = Encrypt.encrypt(plaintext, key)
ciphertext = '{}Q_Q{}'.format(key, ciphertext)
message = Message(text=unicode(ciphertext, 'ascii'))
self.client.send(message, self.currentUser.uid)
self.entry_field.delete(0, END)
self.client.most_recent_message = message
self.msg_list.insert(0, ((self.name + ': ') + plaintext))
self.msg_list.see(END)
|
def send(self, _=):
'\n \n '
plaintext = self.entry_field.get()
key = randint((- 60), 60)
ciphertext = Encrypt.encrypt(plaintext, key)
ciphertext = '{}Q_Q{}'.format(key, ciphertext)
message = Message(text=unicode(ciphertext, 'ascii'))
self.client.send(message, self.currentUser.uid)
self.entry_field.delete(0, END)
self.client.most_recent_message = message
self.msg_list.insert(0, ((self.name + ': ') + plaintext))
self.msg_list.see(END)<|docstring|>Send messages, will send whatever is in the message field and then clear it<|endoftext|>
|
338e468af58cc6b04c037d2df837a05f3872eeda7e7690eb967a1ab09125ee05
|
def changeConvo(self, param):
'\n When you click on another user in the chat we update the page\n '
print('CHANGING CONVO')
selectionIndex = self.usr_list.curselection()
self.currentUser = self.users[selectionIndex[0]]
self.changingConvo = True
self.updateConversation()
|
When you click on another user in the chat we update the page
|
GUI_app.py
|
changeConvo
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def changeConvo(self, param):
'\n \n '
print('CHANGING CONVO')
selectionIndex = self.usr_list.curselection()
self.currentUser = self.users[selectionIndex[0]]
self.changingConvo = True
self.updateConversation()
|
def changeConvo(self, param):
'\n \n '
print('CHANGING CONVO')
selectionIndex = self.usr_list.curselection()
self.currentUser = self.users[selectionIndex[0]]
self.changingConvo = True
self.updateConversation()<|docstring|>When you click on another user in the chat we update the page<|endoftext|>
|
8a74604218ef5107dfbaeb6f8faad56c1cdb2e954cd90875092d88ef73187560
|
def updateConversation(self):
'\n Clear the conversation box, reupdate with new conversation, pings facebook server if they got anything\n '
if self.changingConvo:
print('[updateConversation] we are changing conversation')
messages = self.client.fetchThreadMessages(self.currentUser.uid)
self.msg_list.delete(0, END)
for message in messages:
text = self.decrypt_w_uc(message)
self.msg_list.insert(0, ((self.client._fetchInfo(message.author)[message.author]['first_name'] + ': ') + text))
self.msg_list.see(END)
self.changingConvo = False
else:
last_message = self.msg_list.get(END)
if ((self.client is not None) and self.client.isLoggedIn() and (self.client.most_recent_message is not None)):
msg_object = self.client.most_recent_message
msg_author = self.client.most_recent_message.author
name = ''
if (msg_author is None):
msg_author = self.name
else:
name = self.client._fetchInfo(msg_author)[msg_author]['first_name']
text = self.decrypt_w_uc(msg_object)
new_last_message = ((name + ': ') + text)
if (last_message != new_last_message):
if ((name + ': ') in last_message):
while (self.client.most_recent_messages_queue.empty() is not True):
message = self.client.most_recent_messages_queue.get()
text = self.decrypt_w_uc(message)
self.msg_list.insert(END, ((self.client._fetchInfo(message.author)[message.author]['first_name'] + ': ') + text))
self.msg_list.see(END)
else:
messages = self.client.fetchThreadMessages(self.currentUser.uid)
self.msg_list.delete(0, END)
for message in messages:
text = self.decrypt_w_uc(message)
self.msg_list.insert(0, ((self.client._fetchInfo(message.author)[message.author]['first_name'] + ': ') + text))
self.msg_list.see(END)
self.client.most_recent_message = messages[0]
|
Clear the conversation box, reupdate with new conversation, pings facebook server if they got anything
|
GUI_app.py
|
updateConversation
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def updateConversation(self):
'\n \n '
if self.changingConvo:
print('[updateConversation] we are changing conversation')
messages = self.client.fetchThreadMessages(self.currentUser.uid)
self.msg_list.delete(0, END)
for message in messages:
text = self.decrypt_w_uc(message)
self.msg_list.insert(0, ((self.client._fetchInfo(message.author)[message.author]['first_name'] + ': ') + text))
self.msg_list.see(END)
self.changingConvo = False
else:
last_message = self.msg_list.get(END)
if ((self.client is not None) and self.client.isLoggedIn() and (self.client.most_recent_message is not None)):
msg_object = self.client.most_recent_message
msg_author = self.client.most_recent_message.author
name =
if (msg_author is None):
msg_author = self.name
else:
name = self.client._fetchInfo(msg_author)[msg_author]['first_name']
text = self.decrypt_w_uc(msg_object)
new_last_message = ((name + ': ') + text)
if (last_message != new_last_message):
if ((name + ': ') in last_message):
while (self.client.most_recent_messages_queue.empty() is not True):
message = self.client.most_recent_messages_queue.get()
text = self.decrypt_w_uc(message)
self.msg_list.insert(END, ((self.client._fetchInfo(message.author)[message.author]['first_name'] + ': ') + text))
self.msg_list.see(END)
else:
messages = self.client.fetchThreadMessages(self.currentUser.uid)
self.msg_list.delete(0, END)
for message in messages:
text = self.decrypt_w_uc(message)
self.msg_list.insert(0, ((self.client._fetchInfo(message.author)[message.author]['first_name'] + ': ') + text))
self.msg_list.see(END)
self.client.most_recent_message = messages[0]
|
def updateConversation(self):
'\n \n '
if self.changingConvo:
print('[updateConversation] we are changing conversation')
messages = self.client.fetchThreadMessages(self.currentUser.uid)
self.msg_list.delete(0, END)
for message in messages:
text = self.decrypt_w_uc(message)
self.msg_list.insert(0, ((self.client._fetchInfo(message.author)[message.author]['first_name'] + ': ') + text))
self.msg_list.see(END)
self.changingConvo = False
else:
last_message = self.msg_list.get(END)
if ((self.client is not None) and self.client.isLoggedIn() and (self.client.most_recent_message is not None)):
msg_object = self.client.most_recent_message
msg_author = self.client.most_recent_message.author
name =
if (msg_author is None):
msg_author = self.name
else:
name = self.client._fetchInfo(msg_author)[msg_author]['first_name']
text = self.decrypt_w_uc(msg_object)
new_last_message = ((name + ': ') + text)
if (last_message != new_last_message):
if ((name + ': ') in last_message):
while (self.client.most_recent_messages_queue.empty() is not True):
message = self.client.most_recent_messages_queue.get()
text = self.decrypt_w_uc(message)
self.msg_list.insert(END, ((self.client._fetchInfo(message.author)[message.author]['first_name'] + ': ') + text))
self.msg_list.see(END)
else:
messages = self.client.fetchThreadMessages(self.currentUser.uid)
self.msg_list.delete(0, END)
for message in messages:
text = self.decrypt_w_uc(message)
self.msg_list.insert(0, ((self.client._fetchInfo(message.author)[message.author]['first_name'] + ': ') + text))
self.msg_list.see(END)
self.client.most_recent_message = messages[0]<|docstring|>Clear the conversation box, reupdate with new conversation, pings facebook server if they got anything<|endoftext|>
|
d8b7e2fc1e2b1f4b52717c35d5cb40e7e6caf8d32bdc637e2df96279dd8dd151
|
def decrypt_w_uc(self, message):
"\n Decrypt with unicode character check - will decrypt when necessary,\n and then convert unicode to ascii so TCL won't freak out\n\n Input: message -> fbchat.models.Message, Message object\n Output: clean_text -> String\n "
clean_text = ''
if ('Q_Q' in message.text):
(key, ciphertext) = message.text.split('Q_Q')
clean_text = Encrypt.decrypt(ciphertext, int(key))
else:
clean_text = message.text
clean_clean_text = ''
for character in clean_text:
if (type(character) is unicode):
clean_clean_text += unicodedata.normalize('NFKD', character).encode('ascii', 'replace')
else:
clean_clean_text += character
return clean_clean_text
|
Decrypt with unicode character check - will decrypt when necessary,
and then convert unicode to ascii so TCL won't freak out
Input: message -> fbchat.models.Message, Message object
Output: clean_text -> String
|
GUI_app.py
|
decrypt_w_uc
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def decrypt_w_uc(self, message):
"\n Decrypt with unicode character check - will decrypt when necessary,\n and then convert unicode to ascii so TCL won't freak out\n\n Input: message -> fbchat.models.Message, Message object\n Output: clean_text -> String\n "
clean_text =
if ('Q_Q' in message.text):
(key, ciphertext) = message.text.split('Q_Q')
clean_text = Encrypt.decrypt(ciphertext, int(key))
else:
clean_text = message.text
clean_clean_text =
for character in clean_text:
if (type(character) is unicode):
clean_clean_text += unicodedata.normalize('NFKD', character).encode('ascii', 'replace')
else:
clean_clean_text += character
return clean_clean_text
|
def decrypt_w_uc(self, message):
"\n Decrypt with unicode character check - will decrypt when necessary,\n and then convert unicode to ascii so TCL won't freak out\n\n Input: message -> fbchat.models.Message, Message object\n Output: clean_text -> String\n "
clean_text =
if ('Q_Q' in message.text):
(key, ciphertext) = message.text.split('Q_Q')
clean_text = Encrypt.decrypt(ciphertext, int(key))
else:
clean_text = message.text
clean_clean_text =
for character in clean_text:
if (type(character) is unicode):
clean_clean_text += unicodedata.normalize('NFKD', character).encode('ascii', 'replace')
else:
clean_clean_text += character
return clean_clean_text<|docstring|>Decrypt with unicode character check - will decrypt when necessary,
and then convert unicode to ascii so TCL won't freak out
Input: message -> fbchat.models.Message, Message object
Output: clean_text -> String<|endoftext|>
|
4d28fba98b31a7698687f4bd0d2e88a1ba08eff5850c9e0c7479b13bd57fb5c8
|
def exit(self):
'\n Stops listening and ends GUI\n '
self.client.stopListening()
self.parent.destroy()
|
Stops listening and ends GUI
|
GUI_app.py
|
exit
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def exit(self):
'\n \n '
self.client.stopListening()
self.parent.destroy()
|
def exit(self):
'\n \n '
self.client.stopListening()
self.parent.destroy()<|docstring|>Stops listening and ends GUI<|endoftext|>
|
e6b97fc6ad2ffb7dac424dafed308aa9a5e3c31dfd5cc41a86d920cbd4e0ce0a
|
def checkThread(self, thread, function):
'\n This function checks to see if\n the given thread is dead, if it\n is not, it recalls a new checkThread.\n After the thread is dead, it calls the\n given function\n\n @param:\n thread - ThreadedTask\n functoin - a function\n '
if thread.is_alive():
self.parent.after(1000, (lambda : self.checkThread(thread, function)))
else:
function()
|
This function checks to see if
the given thread is dead, if it
is not, it recalls a new checkThread.
After the thread is dead, it calls the
given function
@param:
thread - ThreadedTask
functoin - a function
|
GUI_app.py
|
checkThread
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def checkThread(self, thread, function):
'\n This function checks to see if\n the given thread is dead, if it\n is not, it recalls a new checkThread.\n After the thread is dead, it calls the\n given function\n\n @param:\n thread - ThreadedTask\n functoin - a function\n '
if thread.is_alive():
self.parent.after(1000, (lambda : self.checkThread(thread, function)))
else:
function()
|
def checkThread(self, thread, function):
'\n This function checks to see if\n the given thread is dead, if it\n is not, it recalls a new checkThread.\n After the thread is dead, it calls the\n given function\n\n @param:\n thread - ThreadedTask\n functoin - a function\n '
if thread.is_alive():
self.parent.after(1000, (lambda : self.checkThread(thread, function)))
else:
function()<|docstring|>This function checks to see if
the given thread is dead, if it
is not, it recalls a new checkThread.
After the thread is dead, it calls the
given function
@param:
thread - ThreadedTask
functoin - a function<|endoftext|>
|
04c91cfed820c3a2f3432b96aa2a8b28af4cbd7e74ff3cb328b519813447ba3b
|
def __init__(self, queue, function):
'\n Starts the threaded task\n\n @param:\n queue - Queue object\n function - a function\n '
threading.Thread.__init__(self)
self.queue = queue
self.function = function
|
Starts the threaded task
@param:
queue - Queue object
function - a function
|
GUI_app.py
|
__init__
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def __init__(self, queue, function):
'\n Starts the threaded task\n\n @param:\n queue - Queue object\n function - a function\n '
threading.Thread.__init__(self)
self.queue = queue
self.function = function
|
def __init__(self, queue, function):
'\n Starts the threaded task\n\n @param:\n queue - Queue object\n function - a function\n '
threading.Thread.__init__(self)
self.queue = queue
self.function = function<|docstring|>Starts the threaded task
@param:
queue - Queue object
function - a function<|endoftext|>
|
4516c6b1b723db2883205d8411e45ee9d8cf423a2f16b605b85d16f301c76119
|
def run(self):
'\n Runs the function\n '
self.function()
|
Runs the function
|
GUI_app.py
|
run
|
Howardlinsanity/CIS-433-Group-Project-W18
| 0
|
python
|
def run(self):
'\n \n '
self.function()
|
def run(self):
'\n \n '
self.function()<|docstring|>Runs the function<|endoftext|>
|
4fe93e013dabf44eb635da98e6916645721fc5a2d8285017e44efe9e159e1e4b
|
def __init__(self, initialvalue='U'):
'\n :param initialvalue: value to be loaded into the bit\n :type initialvalue: int, bool, str\n '
self._value = 'U'
self.set(value=initialvalue)
|
:param initialvalue: value to be loaded into the bit
:type initialvalue: int, bool, str
|
py_std_logic_1164/std_logic.py
|
__init__
|
krcb197/py_std_logic_1164
| 0
|
python
|
def __init__(self, initialvalue='U'):
'\n :param initialvalue: value to be loaded into the bit\n :type initialvalue: int, bool, str\n '
self._value = 'U'
self.set(value=initialvalue)
|
def __init__(self, initialvalue='U'):
'\n :param initialvalue: value to be loaded into the bit\n :type initialvalue: int, bool, str\n '
self._value = 'U'
self.set(value=initialvalue)<|docstring|>:param initialvalue: value to be loaded into the bit
:type initialvalue: int, bool, str<|endoftext|>
|
09aeafafcf57ac5f60af6cd441df67f63bbbb4dd12b6488449d6b176d7a1478f
|
def __xor__(self, other):
'\n perfroms a bitwise xor operation\n :param other:\n :return: self ^ other\n '
return_value = NotImplemented
if issubclass(other.__class__, std_logic):
"\n truth table from std_logic_1164-body.vhdl\n ----------------------------------------------------\n | U X 0 1 Z W L H - | | \n ----------------------------------------------------\n ('U', 'U', 'U', 'U', 'U', 'U', 'U', 'U', 'U'), -- | U |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X'), -- | X |\n ('U', 'X', '0', '1', 'X', 'X', '0', '1', 'X'), -- | 0 |\n ('U', 'X', '1', '0', 'X', 'X', '1', '0', 'X'), -- | 1 |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X'), -- | Z |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X'), -- | W |\n ('U', 'X', '0', '1', 'X', 'X', '0', '1', 'X'), -- | L |\n ('U', 'X', '1', '0', 'X', 'X', '1', '0', 'X'), -- | H |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X') -- | - |\n );\n "
if (self == std_logic('U')):
return_value = std_logic('U')
elif ((self == std_logic('X')) or (self == std_logic('-')) or (self == std_logic('W')) or (self == std_logic('Z'))):
if (other == std_logic('U')):
return_value = std_logic('U')
else:
return_value = std_logic('X')
elif ((self == std_logic('1')) or (self == std_logic('H'))):
if (other == std_logic('U')):
return_value = std_logic('U')
elif ((other == std_logic('0')) or (other == std_logic('L'))):
return_value = std_logic(1)
elif ((other == std_logic('1')) or (other == std_logic('H'))):
return_value = std_logic(0)
else:
return_value = std_logic('X')
elif ((self == std_logic('0')) or (self == std_logic('L'))):
if (other == std_logic('U')):
return_value = std_logic('U')
elif ((other == std_logic('0')) or (other == std_logic('L'))):
return_value = std_logic(0)
elif ((other == std_logic('1')) or (other == std_logic('H'))):
return_value = std_logic(1)
else:
return_value = std_logic('X')
else:
raise TypeError('can not perform operation on classes')
return return_value
|
perfroms a bitwise xor operation
:param other:
:return: self ^ other
|
py_std_logic_1164/std_logic.py
|
__xor__
|
krcb197/py_std_logic_1164
| 0
|
python
|
def __xor__(self, other):
'\n perfroms a bitwise xor operation\n :param other:\n :return: self ^ other\n '
return_value = NotImplemented
if issubclass(other.__class__, std_logic):
"\n truth table from std_logic_1164-body.vhdl\n ----------------------------------------------------\n | U X 0 1 Z W L H - | | \n ----------------------------------------------------\n ('U', 'U', 'U', 'U', 'U', 'U', 'U', 'U', 'U'), -- | U |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X'), -- | X |\n ('U', 'X', '0', '1', 'X', 'X', '0', '1', 'X'), -- | 0 |\n ('U', 'X', '1', '0', 'X', 'X', '1', '0', 'X'), -- | 1 |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X'), -- | Z |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X'), -- | W |\n ('U', 'X', '0', '1', 'X', 'X', '0', '1', 'X'), -- | L |\n ('U', 'X', '1', '0', 'X', 'X', '1', '0', 'X'), -- | H |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X') -- | - |\n );\n "
if (self == std_logic('U')):
return_value = std_logic('U')
elif ((self == std_logic('X')) or (self == std_logic('-')) or (self == std_logic('W')) or (self == std_logic('Z'))):
if (other == std_logic('U')):
return_value = std_logic('U')
else:
return_value = std_logic('X')
elif ((self == std_logic('1')) or (self == std_logic('H'))):
if (other == std_logic('U')):
return_value = std_logic('U')
elif ((other == std_logic('0')) or (other == std_logic('L'))):
return_value = std_logic(1)
elif ((other == std_logic('1')) or (other == std_logic('H'))):
return_value = std_logic(0)
else:
return_value = std_logic('X')
elif ((self == std_logic('0')) or (self == std_logic('L'))):
if (other == std_logic('U')):
return_value = std_logic('U')
elif ((other == std_logic('0')) or (other == std_logic('L'))):
return_value = std_logic(0)
elif ((other == std_logic('1')) or (other == std_logic('H'))):
return_value = std_logic(1)
else:
return_value = std_logic('X')
else:
raise TypeError('can not perform operation on classes')
return return_value
|
def __xor__(self, other):
'\n perfroms a bitwise xor operation\n :param other:\n :return: self ^ other\n '
return_value = NotImplemented
if issubclass(other.__class__, std_logic):
"\n truth table from std_logic_1164-body.vhdl\n ----------------------------------------------------\n | U X 0 1 Z W L H - | | \n ----------------------------------------------------\n ('U', 'U', 'U', 'U', 'U', 'U', 'U', 'U', 'U'), -- | U |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X'), -- | X |\n ('U', 'X', '0', '1', 'X', 'X', '0', '1', 'X'), -- | 0 |\n ('U', 'X', '1', '0', 'X', 'X', '1', '0', 'X'), -- | 1 |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X'), -- | Z |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X'), -- | W |\n ('U', 'X', '0', '1', 'X', 'X', '0', '1', 'X'), -- | L |\n ('U', 'X', '1', '0', 'X', 'X', '1', '0', 'X'), -- | H |\n ('U', 'X', 'X', 'X', 'X', 'X', 'X', 'X', 'X') -- | - |\n );\n "
if (self == std_logic('U')):
return_value = std_logic('U')
elif ((self == std_logic('X')) or (self == std_logic('-')) or (self == std_logic('W')) or (self == std_logic('Z'))):
if (other == std_logic('U')):
return_value = std_logic('U')
else:
return_value = std_logic('X')
elif ((self == std_logic('1')) or (self == std_logic('H'))):
if (other == std_logic('U')):
return_value = std_logic('U')
elif ((other == std_logic('0')) or (other == std_logic('L'))):
return_value = std_logic(1)
elif ((other == std_logic('1')) or (other == std_logic('H'))):
return_value = std_logic(0)
else:
return_value = std_logic('X')
elif ((self == std_logic('0')) or (self == std_logic('L'))):
if (other == std_logic('U')):
return_value = std_logic('U')
elif ((other == std_logic('0')) or (other == std_logic('L'))):
return_value = std_logic(0)
elif ((other == std_logic('1')) or (other == std_logic('H'))):
return_value = std_logic(1)
else:
return_value = std_logic('X')
else:
raise TypeError('can not perform operation on classes')
return return_value<|docstring|>perfroms a bitwise xor operation
:param other:
:return: self ^ other<|endoftext|>
|
d632b71b6e37503659133a6476d72654b4812953dcbeac2c7d3a7b28e7a1995e
|
def __invert__(self):
    """Logical negation per the std_logic_1164 'not' truth table.

    -------------------------------------------------
    |  U    X    0    1    Z    W    L    H    -    |
    -------------------------------------------------
    ('U', 'X', '1', '0', 'X', 'X', '1', '0', 'X')
    """
    if self == std_logic('U'):
        # Uninitialised stays uninitialised.
        return std_logic('U')
    if (self == std_logic('0')) or (self == std_logic('L')):
        return std_logic(1)
    if (self == std_logic('1')) or (self == std_logic('H')):
        return std_logic(0)
    # 'X', 'W', 'Z' and '-' all invert to unknown.
    return std_logic('X')
|
truth table from std_logic_1164-body.vhdl
-------------------------------------------------
| U X 0 1 Z W L H - |
-------------------------------------------------
('U', 'X', '1', '0', 'X', 'X', '1', '0', 'X')
|
py_std_logic_1164/std_logic.py
|
__invert__
|
krcb197/py_std_logic_1164
| 0
|
python
|
def __invert__(self):
"\n truth table from std_logic_1164-body.vhdl\n -------------------------------------------------\n | U X 0 1 Z W L H - |\n -------------------------------------------------\n ('U', 'X', '1', '0', 'X', 'X', '1', '0', 'X')\n\n "
if (self == std_logic('U')):
return_value = std_logic('U')
elif ((self == std_logic('X')) or (self == std_logic('-')) or (self == std_logic('W')) or (self == std_logic('Z'))):
return_value = std_logic('X')
elif ((self == std_logic('0')) or (self == std_logic('L'))):
return_value = std_logic(1)
elif ((self == std_logic('1')) or (self == std_logic('H'))):
return_value = std_logic(0)
return return_value
|
def __invert__(self):
"\n truth table from std_logic_1164-body.vhdl\n -------------------------------------------------\n | U X 0 1 Z W L H - |\n -------------------------------------------------\n ('U', 'X', '1', '0', 'X', 'X', '1', '0', 'X')\n\n "
if (self == std_logic('U')):
return_value = std_logic('U')
elif ((self == std_logic('X')) or (self == std_logic('-')) or (self == std_logic('W')) or (self == std_logic('Z'))):
return_value = std_logic('X')
elif ((self == std_logic('0')) or (self == std_logic('L'))):
return_value = std_logic(1)
elif ((self == std_logic('1')) or (self == std_logic('H'))):
return_value = std_logic(0)
return return_value<|docstring|>truth table from std_logic_1164-body.vhdl
-------------------------------------------------
| U X 0 1 Z W L H - |
-------------------------------------------------
('U', 'X', '1', '0', 'X', 'X', '1', '0', 'X')<|endoftext|>
|
6a25c6a83b7258262b5153bef347a1ee01241483e227530f167e6153e654f6bc
|
def set(self, value):
    """In-place value set.

    :param value: value to be loaded into the bit
    :type value: int, bool, str
    :raises ValueError: on any value outside the nine std_logic
        characters, True/False, or the integers 0/1
    """
    if isinstance(value, str):
        if len(value) != 1:
            raise ValueError('length is not 1')
        if value in ('U', 'X', '0', '1', 'Z', 'W', 'L', 'H', '-'):
            self._value = value
        else:
            raise ValueError('Unsupported value, only U,X,0,1,Z,W,L,H or - is permitted')
    elif isinstance(value, bool):
        # bool must be tested before int: bool is an int subclass.
        self._value = '1' if value else '0'
    elif isinstance(value, int):
        if value in (0, 1):
            self._value = str(value)
            assert self._value in ('0', '1')
        else:
            raise ValueError('Unsupported integer value, only 0 or 1 is permitted')
    else:
        raise ValueError('Unsupported type')
|
in place value set
:param value: value to be loaded into the bit
:type value: int, bool, str
|
py_std_logic_1164/std_logic.py
|
set
|
krcb197/py_std_logic_1164
| 0
|
python
|
def set(self, value):
'\n in place value set\n\n :param value: value to be loaded into the bit\n :type value: int, bool, str\n\n '
if isinstance(value, str):
if (len(value) != 1):
raise ValueError('length is not 1')
if ((value == 'U') or (value == 'X') or (value == '0') or (value == '1') or (value == 'Z') or (value == 'W') or (value == 'L') or (value == 'H') or (value == '-')):
self._value = value
else:
raise ValueError('Unsupported value, only U,X,0,1,Z,W,L,H or - is permitted')
elif isinstance(value, bool):
if (value is False):
self._value = '0'
elif (value is True):
self._value = '1'
else:
raise ValueError('Illegal boolean value')
elif isinstance(value, int):
if ((value == 0) or (value == 1)):
self._value = str(value)
assert ((self._value == '1') or (self._value == '0'))
else:
raise ValueError('Unsupported integer value, only 0 or 1 is permitted')
else:
raise ValueError('Unsupported type')
|
def set(self, value):
'\n in place value set\n\n :param value: value to be loaded into the bit\n :type value: int, bool, str\n\n '
if isinstance(value, str):
if (len(value) != 1):
raise ValueError('length is not 1')
if ((value == 'U') or (value == 'X') or (value == '0') or (value == '1') or (value == 'Z') or (value == 'W') or (value == 'L') or (value == 'H') or (value == '-')):
self._value = value
else:
raise ValueError('Unsupported value, only U,X,0,1,Z,W,L,H or - is permitted')
elif isinstance(value, bool):
if (value is False):
self._value = '0'
elif (value is True):
self._value = '1'
else:
raise ValueError('Illegal boolean value')
elif isinstance(value, int):
if ((value == 0) or (value == 1)):
self._value = str(value)
assert ((self._value == '1') or (self._value == '0'))
else:
raise ValueError('Unsupported integer value, only 0 or 1 is permitted')
else:
raise ValueError('Unsupported type')<|docstring|>in place value set
:param value: value to be loaded into the bit
:type value: int, bool, str<|endoftext|>
|
326cb9bbc6338335192140dc9e3cfea4422f4f69efcf0b869cf9d0f35fc01106
|
@staticmethod
def init():
    """Initialize the module.

    Resets the standard container's ``home`` and ``cache`` directories,
    re-bind-mounts ``/dev`` into the container, opens a shared
    ``/dev/null`` descriptor and clears the build cache.
    """
    with StackContext(Privilege.fileaccess):
        try:
            shutil.rmtree('container/standard/home')
        except FileNotFoundError:
            pass
        # NOTE(review): mode=505 is decimal (== 0o771); presumably the
        # original source used an octal literal -- confirm.
        os.mkdir('container/standard/home', mode=505)
        try:
            shutil.rmtree('container/standard/cache')
        except FileNotFoundError:
            pass
        os.mkdir('container/standard/cache', mode=505)
    # Declare only the libc entry points needed for the bind mount.
    ffi = FFI()
    ffi.cdef('int mount(const char source[], const char target[],\n const char filesystemtype[], unsigned long mountflags,\n const void *data);')
    ffi.cdef('int umount(const char *target);')
    libc = ffi.dlopen('libc.so.6')
    with StackContext(Privilege.fullaccess):
        # Drop any stale bind mount first, then bind /dev into the
        # container so sandboxed tasks see the host device nodes.
        libc.umount(b'container/standard/dev')
        libc.mount(b'/dev', b'container/standard/dev', b'', MS_BIND, ffi.NULL)
    # Shared sink for child stdio; CLOEXEC so it doesn't leak to execs.
    StdChal.null_fd = os.open('/dev/null', (os.O_RDWR | os.O_CLOEXEC))
    # res_path -> (cache_hash, gid) and cache_hash -> refcount maps.
    StdChal.build_cache = {}
    StdChal.build_cache_refcount = {}
|
Initialize the module.
|
StdChal.py
|
init
|
pzread/judge
| 25
|
python
|
@staticmethod
def init():
with StackContext(Privilege.fileaccess):
try:
shutil.rmtree('container/standard/home')
except FileNotFoundError:
pass
os.mkdir('container/standard/home', mode=505)
try:
shutil.rmtree('container/standard/cache')
except FileNotFoundError:
pass
os.mkdir('container/standard/cache', mode=505)
ffi = FFI()
ffi.cdef('int mount(const char source[], const char target[],\n const char filesystemtype[], unsigned long mountflags,\n const void *data);')
ffi.cdef('int umount(const char *target);')
libc = ffi.dlopen('libc.so.6')
with StackContext(Privilege.fullaccess):
libc.umount(b'container/standard/dev')
        libc.mount(b'/dev', b'container/standard/dev', b'', MS_BIND, ffi.NULL)
StdChal.null_fd = os.open('/dev/null', (os.O_RDWR | os.O_CLOEXEC))
StdChal.build_cache = {}
StdChal.build_cache_refcount = {}
|
@staticmethod
def init():
with StackContext(Privilege.fileaccess):
try:
shutil.rmtree('container/standard/home')
except FileNotFoundError:
pass
os.mkdir('container/standard/home', mode=505)
try:
shutil.rmtree('container/standard/cache')
except FileNotFoundError:
pass
os.mkdir('container/standard/cache', mode=505)
ffi = FFI()
ffi.cdef('int mount(const char source[], const char target[],\n const char filesystemtype[], unsigned long mountflags,\n const void *data);')
ffi.cdef('int umount(const char *target);')
libc = ffi.dlopen('libc.so.6')
with StackContext(Privilege.fullaccess):
libc.umount(b'container/standard/dev')
        libc.mount(b'/dev', b'container/standard/dev', b'', MS_BIND, ffi.NULL)
StdChal.null_fd = os.open('/dev/null', (os.O_RDWR | os.O_CLOEXEC))
StdChal.build_cache = {}
StdChal.build_cache_refcount = {}<|docstring|>Initialize the module.<|endoftext|>
|
046219c25b15b2756822bde02b61708ec7cdfb2f2ac31b76c646f3c955f14acb
|
@staticmethod
def get_standard_ugid():
    """Generate a fresh standard UID/GID pair.

    Returns:
        (int, int): Standard UID/GID (both equal to the new counter value).
    """
    uid = StdChal.last_standard_uid + 1
    StdChal.last_standard_uid = uid
    return (uid, uid)
|
Generate standard UID/GID.
Returns:
(int, int): Standard UID/GID
|
StdChal.py
|
get_standard_ugid
|
pzread/judge
| 25
|
python
|
@staticmethod
def get_standard_ugid():
'Generate standard UID/GID.\n\n Returns:\n (int, int): Standard UID/GID\n\n '
StdChal.last_standard_uid += 1
return (StdChal.last_standard_uid, StdChal.last_standard_uid)
|
@staticmethod
def get_standard_ugid():
'Generate standard UID/GID.\n\n Returns:\n (int, int): Standard UID/GID\n\n '
StdChal.last_standard_uid += 1
return (StdChal.last_standard_uid, StdChal.last_standard_uid)<|docstring|>Generate standard UID/GID.
Returns:
(int, int): Standard UID/GID<|endoftext|>
|
37545137805ecba248f0970e8dd5ec70c9884bb595e2123f4c6753b5bff51d06
|
@staticmethod
def get_restrict_ugid():
    """Generate a fresh restrict UID/GID pair.

    Returns:
        (int, int): Restrict UID/GID (both equal to the new counter value).
    """
    uid = StdChal.last_restrict_uid + 1
    StdChal.last_restrict_uid = uid
    return (uid, uid)
|
Generate restrict UID/GID.
Returns:
(int, int): Restrict UID/GID
|
StdChal.py
|
get_restrict_ugid
|
pzread/judge
| 25
|
python
|
@staticmethod
def get_restrict_ugid():
'Generate restrict UID/GID.\n\n Returns:\n (int, int): Restrict UID/GID\n\n '
StdChal.last_restrict_uid += 1
return (StdChal.last_restrict_uid, StdChal.last_restrict_uid)
|
@staticmethod
def get_restrict_ugid():
'Generate restrict UID/GID.\n\n Returns:\n (int, int): Restrict UID/GID\n\n '
StdChal.last_restrict_uid += 1
return (StdChal.last_restrict_uid, StdChal.last_restrict_uid)<|docstring|>Generate restrict UID/GID.
Returns:
(int, int): Restrict UID/GID<|endoftext|>
|
2224da18c3ccaba72cef057f8f396ed4fa2c69e20fa3ff907fa087a59c1394aa
|
@staticmethod
def build_cache_find(res_path):
    """Get the build cache entry for a resource path.

    Args:
        res_path (string): Resource path.

    Returns:
        (string, int): (cache hash, GID) or None if not found.
    """
    # dict.get expresses the try/except-KeyError idiom directly and
    # returns None on a miss, exactly like the original.
    return StdChal.build_cache.get(res_path)
|
Get build cache.
Args:
res_path (string): Resource path.
Returns:
(string, int): (cache hash, GID) or None if not found.
|
StdChal.py
|
build_cache_find
|
pzread/judge
| 25
|
python
|
@staticmethod
def build_cache_find(res_path):
'Get build cache.\n\n Args:\n res_path (string): Resource path.\n\n Returns:\n (string, int): (cache hash, GID) or None if not found.\n\n '
try:
return StdChal.build_cache[res_path]
except KeyError:
return None
|
@staticmethod
def build_cache_find(res_path):
'Get build cache.\n\n Args:\n res_path (string): Resource path.\n\n Returns:\n (string, int): (cache hash, GID) or None if not found.\n\n '
try:
return StdChal.build_cache[res_path]
except KeyError:
return None<|docstring|>Get build cache.
Args:
res_path (string): Resource path.
Returns:
(string, int): (cache hash, GID) or None if not found.<|endoftext|>
|
c7eb5909104dde4ee2ed69c522cfd4d0ad9b7510277283c72d68bf812ba319f3
|
@staticmethod
def build_cache_update(res_path, cache_hash, gid):
    """Update the build cache entry for a resource path.

    Releases the previous entry (if any) before installing the new one
    with a refcount of 1.

    Args:
        res_path (string): Resource path.
        cache_hash (int): Cache hash.
        gid (int): GID.

    Returns:
        None
    """
    previous = StdChal.build_cache_find(res_path)
    if previous is not None:
        # Drop the reference held by the stale entry, then remove it.
        StdChal.build_cache_decref(previous[0])
        del StdChal.build_cache[res_path]
    StdChal.build_cache[res_path] = (cache_hash, gid)
    StdChal.build_cache_refcount[cache_hash] = 1
|
Update build cache.
Args:
res_path (string): Resource path.
cache_hash (int): Cache hash.
gid (int): GID.
Returns:
None
|
StdChal.py
|
build_cache_update
|
pzread/judge
| 25
|
python
|
@staticmethod
def build_cache_update(res_path, cache_hash, gid):
'Update build cache.\n\n Args:\n res_path (string): Resource path.\n cache_hash (int): Cache hash.\n gid (int): GID.\n\n Returns:\n None\n\n '
ret = StdChal.build_cache_find(res_path)
if (ret is not None):
StdChal.build_cache_decref(ret[0])
del StdChal.build_cache[res_path]
StdChal.build_cache[res_path] = (cache_hash, gid)
StdChal.build_cache_refcount[cache_hash] = 1
|
@staticmethod
def build_cache_update(res_path, cache_hash, gid):
'Update build cache.\n\n Args:\n res_path (string): Resource path.\n cache_hash (int): Cache hash.\n gid (int): GID.\n\n Returns:\n None\n\n '
ret = StdChal.build_cache_find(res_path)
if (ret is not None):
StdChal.build_cache_decref(ret[0])
del StdChal.build_cache[res_path]
StdChal.build_cache[res_path] = (cache_hash, gid)
StdChal.build_cache_refcount[cache_hash] = 1<|docstring|>Update build cache.
Args:
res_path (string): Resource path.
cache_hash (int): Cache hash.
gid (int): GID.
Returns:
None<|endoftext|>
|
3baef007fc6900bedda18ac7ac05dd791f6367629f14ac594f553fc270d8caf6
|
@staticmethod
def build_cache_incref(cache_hash):
    """Increment the refcount of the build cache.

    Args:
        cache_hash (int): Cache hash.

    Returns:
        None
    """
    # Caller must ensure the entry exists; a missing hash raises KeyError.
    StdChal.build_cache_refcount[cache_hash] += 1
|
Increment the refcount of the build cache.
Args:
cache_hash (int): Cache hash.
Returns:
None
|
StdChal.py
|
build_cache_incref
|
pzread/judge
| 25
|
python
|
@staticmethod
def build_cache_incref(cache_hash):
'Increment the refcount of the build cache.\n\n Args:\n cache_hash (int): Cache hash.\n\n Returns:\n None\n\n '
StdChal.build_cache_refcount[cache_hash] += 1
|
@staticmethod
def build_cache_incref(cache_hash):
'Increment the refcount of the build cache.\n\n Args:\n cache_hash (int): Cache hash.\n\n Returns:\n None\n\n '
StdChal.build_cache_refcount[cache_hash] += 1<|docstring|>Increment the refcount of the build cache.
Args:
cache_hash (int): Cache hash.
Returns:
None<|endoftext|>
|
c458f1e29353e029f35f530d31982f570ba112465ee3bbe0a8f769c11614160a
|
@staticmethod
def build_cache_decref(cache_hash):
    """Decrement the refcount of the build cache.

    Delete the cached build directory if the refcount reaches 0.

    Args:
        cache_hash (int): Cache hash.

    Returns:
        None
    """
    StdChal.build_cache_refcount[cache_hash] -= 1
    if (StdChal.build_cache_refcount[cache_hash] == 0):
        # NOTE(review): the refcount dict entry itself is never removed,
        # so it stays at 0 after deletion -- confirm this is intentional.
        with StackContext(Privilege.fileaccess):
            shutil.rmtree(('container/standard/cache/%x' % cache_hash))
|
Decrement the refcount of the build cache.
Delete the build cache if the refcount = 0.
Args:
cache_hash (int): Cache hash.
Returns:
None
|
StdChal.py
|
build_cache_decref
|
pzread/judge
| 25
|
python
|
@staticmethod
def build_cache_decref(cache_hash):
'Decrement the refcount of the build cache.\n\n Delete the build cache if the refcount = 0.\n\n Args:\n cache_hash (int): Cache hash.\n\n Returns:\n None\n\n '
StdChal.build_cache_refcount[cache_hash] -= 1
if (StdChal.build_cache_refcount[cache_hash] == 0):
with StackContext(Privilege.fileaccess):
shutil.rmtree(('container/standard/cache/%x' % cache_hash))
|
@staticmethod
def build_cache_decref(cache_hash):
'Decrement the refcount of the build cache.\n\n Delete the build cache if the refcount = 0.\n\n Args:\n cache_hash (int): Cache hash.\n\n Returns:\n None\n\n '
StdChal.build_cache_refcount[cache_hash] -= 1
if (StdChal.build_cache_refcount[cache_hash] == 0):
with StackContext(Privilege.fileaccess):
shutil.rmtree(('container/standard/cache/%x' % cache_hash))<|docstring|>Decrement the refcount of the build cache.
Delete the build cache if the refcount = 0.
Args:
cache_hash (int): Cache hash.
Returns:
None<|endoftext|>
|
3eef0d9355b8fc1e8fa432ddb49958584d631f1578a1e356a04b3438aea583b5
|
def __init__(self, chal_id, code_path, comp_typ, judge_typ, res_path, test_list, metadata):
    """Initialize.

    Args:
        chal_id (int): Challenge ID.
        code_path (string): Code path.
        comp_typ (string): Type of compile.
        judge_typ (string): Type of judge.
        res_path (string): Resource path.
        test_list ([dict]): Test parameter lists.
        metadata (dict): Metadata for judge.
    """
    # Unique per-instance ID, used to build per-challenge paths.
    StdChal.last_uniqid += 1
    self.uniqid = StdChal.last_uniqid
    self.code_path = code_path
    self.res_path = res_path
    self.comp_typ = comp_typ
    self.judge_typ = judge_typ
    self.test_list = test_list
    self.metadata = metadata
    self.chal_id = chal_id
    # Set in start(); per-challenge home directory inside the container.
    self.chal_path = None
    # NOTE(review): this increment plus the one inside
    # get_standard_ugid() skips one UID per challenge -- confirm
    # that is intentional.
    StdChal.last_standard_uid += 1
    (self.compile_uid, self.compile_gid) = StdChal.get_standard_ugid()
|
Initialize.
Args:
chal_id (int): Challenge ID.
code_path (string): Code path.
comp_typ (string): Type of compile.
judge_typ (string): Type of judge.
res_path (string): Resource path.
test_list ([dict]): Test parameter lists.
metadata (dict): Metadata for judge.
|
StdChal.py
|
__init__
|
pzread/judge
| 25
|
python
|
def __init__(self, chal_id, code_path, comp_typ, judge_typ, res_path, test_list, metadata):
'Initialize.\n\n Args:\n chal_id (int): Challenge ID.\n code_path (string): Code path.\n comp_typ (string): Type of compile.\n judge_typ (string): Type of judge.\n res_path (string): Resource path.\n test_list ([dict]): Test parameter lists.\n metadata (dict): Metadata for judge.\n\n '
StdChal.last_uniqid += 1
self.uniqid = StdChal.last_uniqid
self.code_path = code_path
self.res_path = res_path
self.comp_typ = comp_typ
self.judge_typ = judge_typ
self.test_list = test_list
self.metadata = metadata
self.chal_id = chal_id
self.chal_path = None
StdChal.last_standard_uid += 1
(self.compile_uid, self.compile_gid) = StdChal.get_standard_ugid()
|
def __init__(self, chal_id, code_path, comp_typ, judge_typ, res_path, test_list, metadata):
'Initialize.\n\n Args:\n chal_id (int): Challenge ID.\n code_path (string): Code path.\n comp_typ (string): Type of compile.\n judge_typ (string): Type of judge.\n res_path (string): Resource path.\n test_list ([dict]): Test parameter lists.\n metadata (dict): Metadata for judge.\n\n '
StdChal.last_uniqid += 1
self.uniqid = StdChal.last_uniqid
self.code_path = code_path
self.res_path = res_path
self.comp_typ = comp_typ
self.judge_typ = judge_typ
self.test_list = test_list
self.metadata = metadata
self.chal_id = chal_id
self.chal_path = None
StdChal.last_standard_uid += 1
(self.compile_uid, self.compile_gid) = StdChal.get_standard_ugid()<|docstring|>Initialize.
Args:
chal_id (int): Challenge ID.
code_path (string): Code path.
comp_typ (string): Type of compile.
judge_typ (string): Type of judge.
res_path (string): Resource path.
test_list ([dict]): Test parameter lists.
metadata (dict): Metadata for judge.<|endoftext|>
|
e55dac89c00fe628b3d38f13ae4f5402ea8c469bf6a05553d01aa536516268ef
|
@gen.coroutine
def prefetch(self):
    """Prefetch files.

    Warms the filesystem cache for the code file and every file under
    the resource path by fanning the paths out to ./Prefetch.py helper
    subprocesses, 16 paths per process.
    """
    # Deduplicate paths; the code file is always included.
    path_set = set([self.code_path])
    for (root, _, files) in os.walk(self.res_path):
        for filename in files:
            path_set.add(os.path.abspath(os.path.join(root, filename)))
    path_list = list(path_set)
    proc_list = []
    with StackContext(Privilege.fileaccess):
        # Batch 16 paths per helper process.
        for idx in range(0, len(path_list), 16):
            proc_list.append(process.Subprocess((['./Prefetch.py'] + path_list[idx:(idx + 16)]), stdout=process.Subprocess.STREAM))
    for proc in proc_list:
        # Each helper is awaited for a 2-byte acknowledgement on stdout,
        # which serialises completion of all prefetchers.
        (yield proc.stdout.read_bytes(2))
|
Prefetch files.
|
StdChal.py
|
prefetch
|
pzread/judge
| 25
|
python
|
@gen.coroutine
def prefetch(self):
path_set = set([self.code_path])
for (root, _, files) in os.walk(self.res_path):
for filename in files:
path_set.add(os.path.abspath(os.path.join(root, filename)))
path_list = list(path_set)
proc_list = []
with StackContext(Privilege.fileaccess):
for idx in range(0, len(path_list), 16):
proc_list.append(process.Subprocess((['./Prefetch.py'] + path_list[idx:(idx + 16)]), stdout=process.Subprocess.STREAM))
for proc in proc_list:
(yield proc.stdout.read_bytes(2))
|
@gen.coroutine
def prefetch(self):
path_set = set([self.code_path])
for (root, _, files) in os.walk(self.res_path):
for filename in files:
path_set.add(os.path.abspath(os.path.join(root, filename)))
path_list = list(path_set)
proc_list = []
with StackContext(Privilege.fileaccess):
for idx in range(0, len(path_list), 16):
proc_list.append(process.Subprocess((['./Prefetch.py'] + path_list[idx:(idx + 16)]), stdout=process.Subprocess.STREAM))
for proc in proc_list:
(yield proc.stdout.read_bytes(2))<|docstring|>Prefetch files.<|endoftext|>
|
2a1d66c0a803c6c9f403d1223899a7d5577a33aa3cd860f84850410228840c51
|
@gen.coroutine
def start(self):
    """Start the challenge.

    Builds (or reuses) the cached checker, compiles the submission,
    runs every test case concurrently and maps low-level detector
    results to judge status codes.

    Returns:
        list: per-test (runtime, peakmem, status, verdict) tuples.
    """
    cache_hash = None
    cache_gid = None
    # ioredir judges need a built checker; reuse the cached build when
    # the resource directory's hash is unchanged.
    if (self.judge_typ in ['ioredir']):
        hashproc = process.Subprocess(['./HashDir.py', (self.res_path + '/check')], stdout=process.Subprocess.STREAM)
        dirhash = (yield hashproc.stdout.read_until(b'\n'))
        dirhash = int(dirhash.decode('utf-8').rstrip('\n'), 16)
        ret = StdChal.build_cache_find(self.res_path)
        if ((ret is not None) and (ret[0] == dirhash)):
            # Cache hit: reuse the existing checker build.
            (cache_hash, cache_gid) = ret
            judge_ioredir = IORedirJudge('container/standard', ('/cache/%x' % cache_hash))
        else:
            # Cache miss: build the checker under fresh standard UGIDs.
            cache_hash = dirhash
            (_, cache_gid) = StdChal.get_standard_ugid()
            build_ugid = StdChal.get_standard_ugid()
            build_relpath = ('/cache/%x' % cache_hash)
            build_path = ('container/standard' + build_relpath)
            judge_ioredir = IORedirJudge('container/standard', build_relpath)
            if (not (yield judge_ioredir.build(build_ugid, self.res_path))):
                # NOTE(review): this path returns a 2-tuple
                # (results, '') while the other returns are plain
                # lists -- confirm callers accept both shapes.
                return (([(0, 0, STATUS_ERR)] * len(self.test_list)), '')
            FileUtils.setperm(build_path, Privilege.JUDGE_UID, cache_gid, umask=488)
            with StackContext(Privilege.fullaccess):
                # NOTE(review): 488 is decimal (== 0o750); presumably
                # octal in the original source -- confirm.
                os.chmod(build_path, 488)
            StdChal.build_cache_update(self.res_path, cache_hash, cache_gid)
            print(('StdChal %d built checker %x' % (self.chal_id, cache_hash)))
        # Hold a cache reference for the lifetime of this challenge;
        # released in the finally block below.
        StdChal.build_cache_incref(cache_hash)
    print(('StdChal %d started' % self.chal_id))
    self.chal_path = ('container/standard/home/%d' % self.uniqid)
    with StackContext(Privilege.fileaccess):
        os.mkdir(self.chal_path, mode=505)
    try:
        (yield self.prefetch())
        print(('StdChal %d prefetched' % self.chal_id))
        # NOTE(review): if comp_typ matches none of these branches,
        # 'ret' is unbound and the comparison below raises NameError --
        # confirm inputs are validated upstream.
        if (self.comp_typ in ['g++', 'clang++']):
            (ret, verdict) = (yield self.comp_cxx())
        elif (self.comp_typ == 'makefile'):
            (ret, verdict) = (yield self.comp_make())
        elif (self.comp_typ == 'python3'):
            (ret, verdict) = (yield self.comp_python())
        if (ret != PyExt.DETECT_NONE):
            # Compile failed: every test gets a CE verdict.
            return ([(0, 0, STATUS_CE, verdict)] * len(self.test_list))
        print(('StdChal %d compiled' % self.chal_id))
        # Select the executable and environment for the run phase.
        if (self.comp_typ == 'python3'):
            exefile_path = (self.chal_path + '/compile/__pycache__/test.cpython-34.pyc')
            exe_path = '/usr/bin/python3.5'
            argv = ['./a.out']
            envp = ['HOME=/', 'LANG=en_US.UTF-8']
        else:
            exefile_path = (self.chal_path + '/compile/a.out')
            exe_path = './a.out'
            argv = []
            envp = []
        test_future = []
        if (self.judge_typ == 'diff'):
            for test in self.test_list:
                test_future.append(self.judge_diff(exefile_path, exe_path, argv, envp, test['in'], test['ans'], test['timelimit'], test['memlimit']))
        elif (self.judge_typ == 'ioredir'):
            for test in self.test_list:
                (check_uid, _) = StdChal.get_standard_ugid()
                (test_uid, test_gid) = StdChal.get_restrict_ugid()
                test_future.append(judge_ioredir.judge(exefile_path, exe_path, argv, envp, (check_uid, cache_gid), (test_uid, test_gid), ('/home/%d/run_%d' % (self.uniqid, test_uid)), test, self.metadata))
        # Run all test cases concurrently.
        test_result = (yield gen.multi(test_future))
        ret_result = list()
        for result in test_result:
            (test_pass, data, verdict) = result
            (runtime, peakmem, error) = data
            status = STATUS_ERR
            # Map the detector outcome to a judge status code.
            if (error == PyExt.DETECT_NONE):
                if (test_pass is True):
                    status = STATUS_AC
                else:
                    status = STATUS_WA
            elif (error == PyExt.DETECT_OOM):
                status = STATUS_MLE
            elif ((error == PyExt.DETECT_TIMEOUT) or (error == PyExt.DETECT_FORCETIMEOUT)):
                status = STATUS_TLE
            elif (error == PyExt.DETECT_EXITERR):
                status = STATUS_RE
            else:
                status = STATUS_ERR
            ret_result.append((runtime, peakmem, status, verdict))
        return ret_result
    finally:
        # Always release the cache reference and the per-challenge home.
        if (cache_hash is not None):
            StdChal.build_cache_decref(cache_hash)
        with StackContext(Privilege.fileaccess):
            shutil.rmtree(self.chal_path)
        print(('StdChal %d done' % self.chal_id))
|
Start the challenge.
Returns:
dict: Challenge result.
|
StdChal.py
|
start
|
pzread/judge
| 25
|
python
|
@gen.coroutine
def start(self):
'Start the challenge.\n\n Returns:\n dict: Challenge result.\n\n '
cache_hash = None
cache_gid = None
if (self.judge_typ in ['ioredir']):
hashproc = process.Subprocess(['./HashDir.py', (self.res_path + '/check')], stdout=process.Subprocess.STREAM)
dirhash = (yield hashproc.stdout.read_until(b'\n'))
dirhash = int(dirhash.decode('utf-8').rstrip('\n'), 16)
ret = StdChal.build_cache_find(self.res_path)
if ((ret is not None) and (ret[0] == dirhash)):
(cache_hash, cache_gid) = ret
judge_ioredir = IORedirJudge('container/standard', ('/cache/%x' % cache_hash))
else:
cache_hash = dirhash
(_, cache_gid) = StdChal.get_standard_ugid()
build_ugid = StdChal.get_standard_ugid()
build_relpath = ('/cache/%x' % cache_hash)
build_path = ('container/standard' + build_relpath)
judge_ioredir = IORedirJudge('container/standard', build_relpath)
if (not (yield judge_ioredir.build(build_ugid, self.res_path))):
                return (([(0, 0, STATUS_ERR)] * len(self.test_list)), '')
FileUtils.setperm(build_path, Privilege.JUDGE_UID, cache_gid, umask=488)
with StackContext(Privilege.fullaccess):
os.chmod(build_path, 488)
StdChal.build_cache_update(self.res_path, cache_hash, cache_gid)
print(('StdChal %d built checker %x' % (self.chal_id, cache_hash)))
StdChal.build_cache_incref(cache_hash)
print(('StdChal %d started' % self.chal_id))
self.chal_path = ('container/standard/home/%d' % self.uniqid)
with StackContext(Privilege.fileaccess):
os.mkdir(self.chal_path, mode=505)
try:
(yield self.prefetch())
print(('StdChal %d prefetched' % self.chal_id))
if (self.comp_typ in ['g++', 'clang++']):
(ret, verdict) = (yield self.comp_cxx())
elif (self.comp_typ == 'makefile'):
(ret, verdict) = (yield self.comp_make())
elif (self.comp_typ == 'python3'):
(ret, verdict) = (yield self.comp_python())
if (ret != PyExt.DETECT_NONE):
return ([(0, 0, STATUS_CE, verdict)] * len(self.test_list))
print(('StdChal %d compiled' % self.chal_id))
if (self.comp_typ == 'python3'):
exefile_path = (self.chal_path + '/compile/__pycache__/test.cpython-34.pyc')
exe_path = '/usr/bin/python3.5'
argv = ['./a.out']
envp = ['HOME=/', 'LANG=en_US.UTF-8']
else:
exefile_path = (self.chal_path + '/compile/a.out')
exe_path = './a.out'
argv = []
envp = []
test_future = []
if (self.judge_typ == 'diff'):
for test in self.test_list:
test_future.append(self.judge_diff(exefile_path, exe_path, argv, envp, test['in'], test['ans'], test['timelimit'], test['memlimit']))
elif (self.judge_typ == 'ioredir'):
for test in self.test_list:
(check_uid, _) = StdChal.get_standard_ugid()
(test_uid, test_gid) = StdChal.get_restrict_ugid()
test_future.append(judge_ioredir.judge(exefile_path, exe_path, argv, envp, (check_uid, cache_gid), (test_uid, test_gid), ('/home/%d/run_%d' % (self.uniqid, test_uid)), test, self.metadata))
test_result = (yield gen.multi(test_future))
ret_result = list()
for result in test_result:
(test_pass, data, verdict) = result
(runtime, peakmem, error) = data
status = STATUS_ERR
if (error == PyExt.DETECT_NONE):
if (test_pass is True):
status = STATUS_AC
else:
status = STATUS_WA
elif (error == PyExt.DETECT_OOM):
status = STATUS_MLE
elif ((error == PyExt.DETECT_TIMEOUT) or (error == PyExt.DETECT_FORCETIMEOUT)):
status = STATUS_TLE
elif (error == PyExt.DETECT_EXITERR):
status = STATUS_RE
else:
status = STATUS_ERR
ret_result.append((runtime, peakmem, status, verdict))
return ret_result
finally:
if (cache_hash is not None):
StdChal.build_cache_decref(cache_hash)
with StackContext(Privilege.fileaccess):
shutil.rmtree(self.chal_path)
print(('StdChal %d done' % self.chal_id))
|
@gen.coroutine
def start(self):
'Start the challenge.\n\n Returns:\n dict: Challenge result.\n\n '
cache_hash = None
cache_gid = None
if (self.judge_typ in ['ioredir']):
hashproc = process.Subprocess(['./HashDir.py', (self.res_path + '/check')], stdout=process.Subprocess.STREAM)
dirhash = (yield hashproc.stdout.read_until(b'\n'))
dirhash = int(dirhash.decode('utf-8').rstrip('\n'), 16)
ret = StdChal.build_cache_find(self.res_path)
if ((ret is not None) and (ret[0] == dirhash)):
(cache_hash, cache_gid) = ret
judge_ioredir = IORedirJudge('container/standard', ('/cache/%x' % cache_hash))
else:
cache_hash = dirhash
(_, cache_gid) = StdChal.get_standard_ugid()
build_ugid = StdChal.get_standard_ugid()
build_relpath = ('/cache/%x' % cache_hash)
build_path = ('container/standard' + build_relpath)
judge_ioredir = IORedirJudge('container/standard', build_relpath)
if (not (yield judge_ioredir.build(build_ugid, self.res_path))):
                return (([(0, 0, STATUS_ERR)] * len(self.test_list)), '')
FileUtils.setperm(build_path, Privilege.JUDGE_UID, cache_gid, umask=488)
with StackContext(Privilege.fullaccess):
os.chmod(build_path, 488)
StdChal.build_cache_update(self.res_path, cache_hash, cache_gid)
print(('StdChal %d built checker %x' % (self.chal_id, cache_hash)))
StdChal.build_cache_incref(cache_hash)
print(('StdChal %d started' % self.chal_id))
self.chal_path = ('container/standard/home/%d' % self.uniqid)
with StackContext(Privilege.fileaccess):
os.mkdir(self.chal_path, mode=505)
try:
(yield self.prefetch())
print(('StdChal %d prefetched' % self.chal_id))
if (self.comp_typ in ['g++', 'clang++']):
(ret, verdict) = (yield self.comp_cxx())
elif (self.comp_typ == 'makefile'):
(ret, verdict) = (yield self.comp_make())
elif (self.comp_typ == 'python3'):
(ret, verdict) = (yield self.comp_python())
if (ret != PyExt.DETECT_NONE):
return ([(0, 0, STATUS_CE, verdict)] * len(self.test_list))
print(('StdChal %d compiled' % self.chal_id))
if (self.comp_typ == 'python3'):
exefile_path = (self.chal_path + '/compile/__pycache__/test.cpython-34.pyc')
exe_path = '/usr/bin/python3.5'
argv = ['./a.out']
envp = ['HOME=/', 'LANG=en_US.UTF-8']
else:
exefile_path = (self.chal_path + '/compile/a.out')
exe_path = './a.out'
argv = []
envp = []
test_future = []
if (self.judge_typ == 'diff'):
for test in self.test_list:
test_future.append(self.judge_diff(exefile_path, exe_path, argv, envp, test['in'], test['ans'], test['timelimit'], test['memlimit']))
elif (self.judge_typ == 'ioredir'):
for test in self.test_list:
(check_uid, _) = StdChal.get_standard_ugid()
(test_uid, test_gid) = StdChal.get_restrict_ugid()
test_future.append(judge_ioredir.judge(exefile_path, exe_path, argv, envp, (check_uid, cache_gid), (test_uid, test_gid), ('/home/%d/run_%d' % (self.uniqid, test_uid)), test, self.metadata))
test_result = (yield gen.multi(test_future))
ret_result = list()
for result in test_result:
(test_pass, data, verdict) = result
(runtime, peakmem, error) = data
status = STATUS_ERR
if (error == PyExt.DETECT_NONE):
if (test_pass is True):
status = STATUS_AC
else:
status = STATUS_WA
elif (error == PyExt.DETECT_OOM):
status = STATUS_MLE
elif ((error == PyExt.DETECT_TIMEOUT) or (error == PyExt.DETECT_FORCETIMEOUT)):
status = STATUS_TLE
elif (error == PyExt.DETECT_EXITERR):
status = STATUS_RE
else:
status = STATUS_ERR
ret_result.append((runtime, peakmem, status, verdict))
return ret_result
finally:
if (cache_hash is not None):
StdChal.build_cache_decref(cache_hash)
with StackContext(Privilege.fileaccess):
shutil.rmtree(self.chal_path)
print(('StdChal %d done' % self.chal_id))<|docstring|>Start the challenge.
Returns:
dict: Challenge result.<|endoftext|>
|
0aad3f456267d3ff8ca43dab5983833aebd5491124cb0db53c373db18d70300d
|
@concurrent.return_future
def comp_cxx(self, callback=None):
    """GCC / Clang compile.

    Copies the submitted source into the compile sandbox and runs the
    configured C++ compiler on it inside a restricted task.

    Args:
        callback (function): Callback of return_future; receives
            (detect_error, verdict).

    Returns:
        None

    """
    def _started_cb(task_id):
        """Started callback.

        Close unused file descriptors after the task is started.

        Args:
            task_id (int): Task ID.

        Returns:
            None

        """
        nonlocal errpipe_fd
        # The sandboxed task now owns the verdict file; drop our descriptor.
        os.close(errpipe_fd)

    def _done_cb(task_id, stat):
        """Done callback.

        Args:
            task_id (int): Task ID.
            stat (dict): Task result.

        Returns:
            None

        """
        nonlocal compile_path
        # Read back at most 140 bytes of compiler diagnostics as the verdict.
        with StackContext(Privilege.fileaccess):
            verfile = open((compile_path + '/verdict.txt'), 'rb')
            verdict = ''.join((chr(c) for c in verfile.read(140)))
            verfile.close()
        callback((stat['detect_error'], verdict))

    compile_path = (self.chal_path + '/compile')
    with StackContext(Privilege.fileaccess):
        os.mkdir(compile_path, mode=504)  # 504 == 0o770
        shutil.copyfile(self.code_path, (compile_path + '/test.cpp'), follow_symlinks=False)
    FileUtils.setperm(compile_path, self.compile_uid, self.compile_gid)
    with StackContext(Privilege.fileaccess):
        # Compiler stderr goes into verdict.txt (mode 288 == 0o440).
        errpipe_fd = os.open((compile_path + '/verdict.txt'), ((os.O_WRONLY | os.O_CREAT) | os.O_CLOEXEC), mode=288)
    # NOTE(review): comp_typ is assumed to be 'g++' or 'clang++' here;
    # any other value leaves `compiler` unbound — confirm callers guard this.
    if (self.comp_typ == 'g++'):
        compiler = '/usr/bin/g++'
    elif (self.comp_typ == 'clang++'):
        compiler = '/usr/bin/clang++'
    task_id = PyExt.create_task(compiler, ['-O2', '-std=c++14', '-o', './a.out', './test.cpp'], ['PATH=/usr/bin:/bin', ('TMPDIR=/home/%d/compile' % self.uniqid)], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: errpipe_fd}, ('/home/%d/compile' % self.uniqid), 'container/standard', self.compile_uid, self.compile_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
    if (task_id is None):
        # Task creation failed: report an internal error with an empty verdict.
        os.close(errpipe_fd)
        callback((PyExt.DETECT_INTERNALERR, ''))
        return
    PyExt.start_task(task_id, _done_cb, _started_cb)
|
GCC, Clang compile.
Args:
callback (function): Callback of return_future.
Returns:
None
|
StdChal.py
|
comp_cxx
|
pzread/judge
| 25
|
python
|
@concurrent.return_future
def comp_cxx(self, callback=None):
    """GCC / Clang compile.

    Copies the submitted source into the compile sandbox and runs the
    configured C++ compiler on it inside a restricted task.

    Args:
        callback (function): Callback of return_future; receives
            (detect_error, verdict).

    Returns:
        None

    """
    def _started_cb(task_id):
        """Started callback.

        Close unused file descriptors after the task is started.

        Args:
            task_id (int): Task ID.

        Returns:
            None

        """
        nonlocal errpipe_fd
        os.close(errpipe_fd)

    def _done_cb(task_id, stat):
        """Done callback.

        Args:
            task_id (int): Task ID.
            stat (dict): Task result.

        Returns:
            None

        """
        nonlocal compile_path
        # Read back at most 140 bytes of compiler diagnostics as the verdict.
        # Restored '' literal (was stripped: `verdict = .join(...)` is a
        # SyntaxError).
        with StackContext(Privilege.fileaccess):
            verfile = open((compile_path + '/verdict.txt'), 'rb')
            verdict = ''.join((chr(c) for c in verfile.read(140)))
            verfile.close()
        callback((stat['detect_error'], verdict))

    compile_path = (self.chal_path + '/compile')
    with StackContext(Privilege.fileaccess):
        os.mkdir(compile_path, mode=504)  # 504 == 0o770
        shutil.copyfile(self.code_path, (compile_path + '/test.cpp'), follow_symlinks=False)
    FileUtils.setperm(compile_path, self.compile_uid, self.compile_gid)
    with StackContext(Privilege.fileaccess):
        errpipe_fd = os.open((compile_path + '/verdict.txt'), ((os.O_WRONLY | os.O_CREAT) | os.O_CLOEXEC), mode=288)
    if (self.comp_typ == 'g++'):
        compiler = '/usr/bin/g++'
    elif (self.comp_typ == 'clang++'):
        compiler = '/usr/bin/clang++'
    task_id = PyExt.create_task(compiler, ['-O2', '-std=c++14', '-o', './a.out', './test.cpp'], ['PATH=/usr/bin:/bin', ('TMPDIR=/home/%d/compile' % self.uniqid)], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: errpipe_fd}, ('/home/%d/compile' % self.uniqid), 'container/standard', self.compile_uid, self.compile_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
    if (task_id is None):
        os.close(errpipe_fd)
        # Restored '' verdict element (stripping it turned the 2-tuple into a
        # 1-tuple, breaking the (detect_error, verdict) contract).
        callback((PyExt.DETECT_INTERNALERR, ''))
        return
    PyExt.start_task(task_id, _done_cb, _started_cb)
|
@concurrent.return_future
def comp_cxx(self, callback=None):
'GCC, Clang compile.\n\n Args:\n callback (function): Callback of return_future.\n\n Returns:\n None\n\n '
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal errpipe_fd
os.close(errpipe_fd)
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal compile_path
with StackContext(Privilege.fileaccess):
verfile = open((compile_path + '/verdict.txt'), 'rb')
verdict = .join((chr(c) for c in verfile.read(140)))
verfile.close()
callback((stat['detect_error'], verdict))
compile_path = (self.chal_path + '/compile')
with StackContext(Privilege.fileaccess):
os.mkdir(compile_path, mode=504)
shutil.copyfile(self.code_path, (compile_path + '/test.cpp'), follow_symlinks=False)
FileUtils.setperm(compile_path, self.compile_uid, self.compile_gid)
with StackContext(Privilege.fileaccess):
errpipe_fd = os.open((compile_path + '/verdict.txt'), ((os.O_WRONLY | os.O_CREAT) | os.O_CLOEXEC), mode=288)
if (self.comp_typ == 'g++'):
compiler = '/usr/bin/g++'
elif (self.comp_typ == 'clang++'):
compiler = '/usr/bin/clang++'
task_id = PyExt.create_task(compiler, ['-O2', '-std=c++14', '-o', './a.out', './test.cpp'], ['PATH=/usr/bin:/bin', ('TMPDIR=/home/%d/compile' % self.uniqid)], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: errpipe_fd}, ('/home/%d/compile' % self.uniqid), 'container/standard', self.compile_uid, self.compile_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
if (task_id is None):
os.close(errpipe_fd)
callback((PyExt.DETECT_INTERNALERR, ))
return
PyExt.start_task(task_id, _done_cb, _started_cb)<|docstring|>GCC, Clang compile.
Args:
callback (function): Callback of return_future.
Returns:
None<|endoftext|>
|
18948049b975bd0a40c2b4cd36fd43ae764a68f722a401efbe3792b3495cdddc
|
@concurrent.return_future
def comp_make(self, callback=None):
    """Makefile compile.

    Copies the problem's make resources plus the submitted source into the
    compile sandbox and runs make inside a restricted task.

    Args:
        callback (function): Callback of return_future; receives
            (detect_error, verdict).

    Returns:
        None

    """
    def _done_cb(task_id, stat):
        """Done callback.

        Args:
            task_id (int): Task ID.
            stat (dict): Task result.

        Returns:
            None

        """
        # Make-based compiles report no textual verdict.
        callback((stat['detect_error'], ''))

    make_path = (self.chal_path + '/compile')
    FileUtils.copydir((self.res_path + '/make'), make_path)
    with StackContext(Privilege.fileaccess):
        shutil.copyfile(self.code_path, (make_path + '/main.cpp'), follow_symlinks=False)
    FileUtils.setperm(make_path, self.compile_uid, self.compile_gid)
    with StackContext(Privilege.fullaccess):
        os.chmod(make_path, mode=504)  # 504 == 0o770
    task_id = PyExt.create_task('/usr/bin/make', [], ['PATH=/usr/bin:/bin', ('TMPDIR=/home/%d/compile' % self.uniqid), 'OUT=./a.out'], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}, ('/home/%d/compile' % self.uniqid), 'container/standard', self.compile_uid, self.compile_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
    if (task_id is None):
        callback((PyExt.DETECT_INTERNALERR, ''))
    else:
        PyExt.start_task(task_id, _done_cb)
|
Makefile compile.
Args:
callback (function): Callback of return_future.
Returns:
None
|
StdChal.py
|
comp_make
|
pzread/judge
| 25
|
python
|
@concurrent.return_future
def comp_make(self, callback=None):
    """Makefile compile.

    Copies the problem's make resources plus the submitted source into the
    compile sandbox and runs make inside a restricted task.

    Args:
        callback (function): Callback of return_future; receives
            (detect_error, verdict).

    Returns:
        None

    """
    def _done_cb(task_id, stat):
        """Done callback.

        Args:
            task_id (int): Task ID.
            stat (dict): Task result.

        Returns:
            None

        """
        # Restored '' verdict element (it was stripped, turning the 2-tuple
        # into a 1-tuple and breaking the (detect_error, verdict) contract).
        callback((stat['detect_error'], ''))

    make_path = (self.chal_path + '/compile')
    FileUtils.copydir((self.res_path + '/make'), make_path)
    with StackContext(Privilege.fileaccess):
        shutil.copyfile(self.code_path, (make_path + '/main.cpp'), follow_symlinks=False)
    FileUtils.setperm(make_path, self.compile_uid, self.compile_gid)
    with StackContext(Privilege.fullaccess):
        os.chmod(make_path, mode=504)  # 504 == 0o770
    task_id = PyExt.create_task('/usr/bin/make', [], ['PATH=/usr/bin:/bin', ('TMPDIR=/home/%d/compile' % self.uniqid), 'OUT=./a.out'], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}, ('/home/%d/compile' % self.uniqid), 'container/standard', self.compile_uid, self.compile_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
    if (task_id is None):
        # Restored '' verdict element here as well.
        callback((PyExt.DETECT_INTERNALERR, ''))
    else:
        PyExt.start_task(task_id, _done_cb)
|
@concurrent.return_future
def comp_make(self, callback=None):
'Makefile compile.\n\n Args:\n callback (function): Callback of return_future.\n\n Returns:\n None\n\n '
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
callback((stat['detect_error'], ))
make_path = (self.chal_path + '/compile')
FileUtils.copydir((self.res_path + '/make'), make_path)
with StackContext(Privilege.fileaccess):
shutil.copyfile(self.code_path, (make_path + '/main.cpp'), follow_symlinks=False)
FileUtils.setperm(make_path, self.compile_uid, self.compile_gid)
with StackContext(Privilege.fullaccess):
os.chmod(make_path, mode=504)
task_id = PyExt.create_task('/usr/bin/make', [], ['PATH=/usr/bin:/bin', ('TMPDIR=/home/%d/compile' % self.uniqid), 'OUT=./a.out'], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}, ('/home/%d/compile' % self.uniqid), 'container/standard', self.compile_uid, self.compile_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
if (task_id is None):
callback((PyExt.DETECT_INTERNALERR, ))
else:
PyExt.start_task(task_id, _done_cb)<|docstring|>Makefile compile.
Args:
callback (function): Callback of return_future.
Returns:
None<|endoftext|>
|
fd0bc81831984dca7103a07d5132ace6f52cfdaee5cc3f4c76be7fdf635d6f83
|
@concurrent.return_future
def comp_python(self, callback=None):
    """Python compile.

    Byte-compiles the submission by running ``python3.5 -m py_compile``
    inside a restricted task.

    NOTE(review): the interpreter here is /usr/bin/python3.5 while run()
    looks for a cpython-34.pyc cache file — verify the two stay in sync.

    Args:
        callback (function): Callback of return_future; receives
            (detect_error, verdict).

    Returns:
        None

    """
    def _started_cb(task_id):
        """Started callback.

        Close unused file descriptors after the task is started.

        Args:
            task_id (int): Task ID.

        Returns:
            None

        """
        nonlocal errpipe_fd
        # The sandboxed task now owns the verdict file; drop our descriptor.
        os.close(errpipe_fd)

    def _done_cb(task_id, stat):
        """Done callback.

        Args:
            task_id (int): Task ID.
            stat (dict): Task result.

        Returns:
            None

        """
        nonlocal compile_path
        # Read back at most 140 bytes of compiler diagnostics as the verdict.
        with StackContext(Privilege.fileaccess):
            verfile = open((compile_path + '/verdict.txt'), 'rb')
            verdict = ''.join((chr(c) for c in verfile.read(140)))
            verfile.close()
        callback((stat['detect_error'], verdict))

    compile_path = (self.chal_path + '/compile')
    with StackContext(Privilege.fileaccess):
        os.mkdir(compile_path, mode=504)  # 504 == 0o770
        shutil.copyfile(self.code_path, (compile_path + '/test.py'), follow_symlinks=False)
    FileUtils.setperm(compile_path, self.compile_uid, self.compile_gid)
    with StackContext(Privilege.fileaccess):
        # Interpreter stderr goes into verdict.txt (mode 288 == 0o440).
        errpipe_fd = os.open((compile_path + '/verdict.txt'), ((os.O_WRONLY | os.O_CREAT) | os.O_CLOEXEC), mode=288)
    task_id = PyExt.create_task('/usr/bin/python3.5', ['-m', 'py_compile', './test.py'], [('HOME=/home/%d/compile' % self.uniqid), 'LANG=en_US.UTF-8'], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: errpipe_fd}, ('/home/%d/compile' % self.uniqid), 'container/standard', self.compile_uid, self.compile_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
    if (task_id is None):
        # Task creation failed: report an internal error with an empty verdict.
        os.close(errpipe_fd)
        callback((PyExt.DETECT_INTERNALERR, ''))
        return
    PyExt.start_task(task_id, _done_cb, _started_cb)
|
Python3.4 compile.
Args:
callback (function): Callback of return_future.
Returns:
None
|
StdChal.py
|
comp_python
|
pzread/judge
| 25
|
python
|
@concurrent.return_future
def comp_python(self, callback=None):
    """Python compile.

    Byte-compiles the submission by running ``python3.5 -m py_compile``
    inside a restricted task.

    Args:
        callback (function): Callback of return_future; receives
            (detect_error, verdict).

    Returns:
        None

    """
    def _started_cb(task_id):
        """Started callback.

        Close unused file descriptors after the task is started.

        Args:
            task_id (int): Task ID.

        Returns:
            None

        """
        nonlocal errpipe_fd
        os.close(errpipe_fd)

    def _done_cb(task_id, stat):
        """Done callback.

        Args:
            task_id (int): Task ID.
            stat (dict): Task result.

        Returns:
            None

        """
        nonlocal compile_path
        # Read back at most 140 bytes of compiler diagnostics as the verdict.
        # Restored '' literal (was stripped: `verdict = .join(...)` is a
        # SyntaxError).
        with StackContext(Privilege.fileaccess):
            verfile = open((compile_path + '/verdict.txt'), 'rb')
            verdict = ''.join((chr(c) for c in verfile.read(140)))
            verfile.close()
        callback((stat['detect_error'], verdict))

    compile_path = (self.chal_path + '/compile')
    with StackContext(Privilege.fileaccess):
        os.mkdir(compile_path, mode=504)  # 504 == 0o770
        shutil.copyfile(self.code_path, (compile_path + '/test.py'), follow_symlinks=False)
    FileUtils.setperm(compile_path, self.compile_uid, self.compile_gid)
    with StackContext(Privilege.fileaccess):
        errpipe_fd = os.open((compile_path + '/verdict.txt'), ((os.O_WRONLY | os.O_CREAT) | os.O_CLOEXEC), mode=288)
    task_id = PyExt.create_task('/usr/bin/python3.5', ['-m', 'py_compile', './test.py'], [('HOME=/home/%d/compile' % self.uniqid), 'LANG=en_US.UTF-8'], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: errpipe_fd}, ('/home/%d/compile' % self.uniqid), 'container/standard', self.compile_uid, self.compile_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
    if (task_id is None):
        os.close(errpipe_fd)
        # Restored '' verdict element (stripping it turned the 2-tuple into a
        # 1-tuple, breaking the (detect_error, verdict) contract).
        callback((PyExt.DETECT_INTERNALERR, ''))
        return
    PyExt.start_task(task_id, _done_cb, _started_cb)
|
@concurrent.return_future
def comp_python(self, callback=None):
'Python3.4 compile.\n\n Args:\n callback (function): Callback of return_future.\n\n Returns:\n None\n\n '
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal errpipe_fd
os.close(errpipe_fd)
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal compile_path
with StackContext(Privilege.fileaccess):
verfile = open((compile_path + '/verdict.txt'), 'rb')
verdict = .join((chr(c) for c in verfile.read(140)))
verfile.close()
callback((stat['detect_error'], verdict))
compile_path = (self.chal_path + '/compile')
with StackContext(Privilege.fileaccess):
os.mkdir(compile_path, mode=504)
shutil.copyfile(self.code_path, (compile_path + '/test.py'), follow_symlinks=False)
FileUtils.setperm(compile_path, self.compile_uid, self.compile_gid)
with StackContext(Privilege.fileaccess):
errpipe_fd = os.open((compile_path + '/verdict.txt'), ((os.O_WRONLY | os.O_CREAT) | os.O_CLOEXEC), mode=288)
task_id = PyExt.create_task('/usr/bin/python3.5', ['-m', 'py_compile', './test.py'], [('HOME=/home/%d/compile' % self.uniqid), 'LANG=en_US.UTF-8'], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: errpipe_fd}, ('/home/%d/compile' % self.uniqid), 'container/standard', self.compile_uid, self.compile_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
if (task_id is None):
os.close(errpipe_fd)
callback((PyExt.DETECT_INTERNALERR, ))
return
PyExt.start_task(task_id, _done_cb, _started_cb)<|docstring|>Python3.4 compile.
Args:
callback (function): Callback of return_future.
Returns:
None<|endoftext|>
|
f4dbeaa15dddce682f7ab43dcb94496c93ad9e77083eb8cbe92e8bd3eee4710c
|
@concurrent.return_future
def judge_diff(self, src_path, exe_path, argv, envp, in_path, ans_path, timelimit, memlimit, callback=None):
    """Diff judge.

    Runs the testee in a restricted sandbox, streams its stdout/stderr
    through a nonblocking pipe, and compares it on the fly against the
    answer file.

    Args:
        src_path (string): Executable source path.
        exe_path (string): Executable or interpreter path in the sandbox.
        argv ([string]): List of arguments.
        envp ([string]): List of environment variables.
        in_path (string): Input file path.
        ans_path (string): Answer file path.
        timelimit (int): Timelimit.
        memlimit (int): Memlimit.
        callback (function): Callback of return_future; receives
            (pass, (utime, peakmem, detect_error), verdict).

    Returns:
        None

    """
    def _started_cb(task_id):
        """Started callback.

        Close unused file descriptors after the task is started.

        Args:
            task_id (int): Task ID.

        Returns:
            None

        """
        nonlocal infile_fd
        nonlocal outpipe_fd
        os.close(infile_fd)
        os.close(outpipe_fd[1])
        # Start watching the read end of the output pipe for diffing.
        IOLoop.instance().add_handler(outpipe_fd[0], _diff_out, (IOLoop.READ | IOLoop.ERROR))

    def _done_cb(task_id, stat):
        """Done callback.

        Args:
            task_id (int): Task ID.
            stat (dict): Task result.

        Returns:
            None

        """
        nonlocal result_stat
        nonlocal result_pass
        result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
        # Fire the callback only if the diff already finished; otherwise
        # _diff_out reports once it completes.
        if (result_pass is not None):
            callback((result_pass, result_stat, ''))

    def _diff_out(evfd, events):
        """Diff the output of the task.

        Args:
            evfd (int): Event file descriptor.
            events (int): Event flags.

        Returns:
            None

        """
        nonlocal outpipe_fd
        nonlocal ansfile
        nonlocal result_stat
        nonlocal result_pass
        end_flag = False
        if (events & IOLoop.READ):
            # Drain the nonblocking pipe and compare chunk by chunk.
            while True:
                try:
                    data = os.read(outpipe_fd[0], 65536)
                except BlockingIOError:
                    break
                ansdata = ansfile.read(len(data))
                if (data != ansdata):
                    result_pass = False
                    end_flag = True
                    break
                if (len(ansdata) == 0):
                    # Output exhausted; pass only if the answer is too.
                    if (len(ansfile.read(1)) == 0):
                        result_pass = True
                    else:
                        result_pass = False
                    end_flag = True
                    break
        if ((events & IOLoop.ERROR) or end_flag):
            if (result_pass is None):
                # Pipe closed without a verdict; check for unread answer data.
                if (len(ansfile.read(1)) == 0):
                    result_pass = True
                else:
                    result_pass = False
            IOLoop.instance().remove_handler(evfd)
            os.close(outpipe_fd[0])
            ansfile.close()
            # Fire the callback only if the task result is already known.
            if (result_stat is not None):
                callback((result_pass, result_stat, ''))

    (judge_uid, judge_gid) = StdChal.get_restrict_ugid()
    with StackContext(Privilege.fileaccess):
        infile_fd = os.open(in_path, (os.O_RDONLY | os.O_CLOEXEC))
        ansfile = open(ans_path, 'rb')
    outpipe_fd = os.pipe2(os.O_CLOEXEC)
    # Reads from the pipe must never block the IOLoop.
    fcntl.fcntl(outpipe_fd[0], fcntl.F_SETFL, os.O_NONBLOCK)
    result_stat = None
    result_pass = None
    with StackContext(Privilege.fileaccess):
        judge_path = (self.chal_path + ('/run_%d' % judge_uid))
        os.mkdir(judge_path, mode=505)  # 505 == 0o771
        shutil.copyfile(src_path, (judge_path + '/a.out'), follow_symlinks=False)
    with StackContext(Privilege.fullaccess):
        os.chown((judge_path + '/a.out'), judge_uid, judge_gid)
        os.chmod((judge_path + '/a.out'), 320)  # 320 == 0o500
    task_id = PyExt.create_task(exe_path, argv, envp, {0: infile_fd, 1: outpipe_fd[1], 2: outpipe_fd[1]}, ('/home/%d/run_%d' % (self.uniqid, judge_uid)), 'container/standard', judge_uid, judge_gid, timelimit, memlimit, PyExt.RESTRICT_LEVEL_HIGH)
    if (task_id is None):
        # Task creation failed: release every descriptor we still own.
        os.close(infile_fd)
        os.close(outpipe_fd[0])
        os.close(outpipe_fd[1])
        ansfile.close()
        callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ''))
    else:
        PyExt.start_task(task_id, _done_cb, _started_cb)
|
Diff judge.
Args:
src_path (string): Executable source path.
exe_path (string): Executable or interpreter path in the sandbox.
argv ([string]): List of arguments.
envp ([string]): List of environment variables.
in_path (string): Input file path.
ans_path (string): Answer file path.
timelimit (int): Timelimit.
memlimit (int): Memlimit.
callback (function): Callback of return_future.
Returns:
None
|
StdChal.py
|
judge_diff
|
pzread/judge
| 25
|
python
|
@concurrent.return_future
def judge_diff(self, src_path, exe_path, argv, envp, in_path, ans_path, timelimit, memlimit, callback=None):
    """Diff judge.

    Runs the testee in a restricted sandbox, streams its stdout/stderr
    through a nonblocking pipe, and compares it on the fly against the
    answer file.

    Args:
        src_path (string): Executable source path.
        exe_path (string): Executable or interpreter path in the sandbox.
        argv ([string]): List of arguments.
        envp ([string]): List of environment variables.
        in_path (string): Input file path.
        ans_path (string): Answer file path.
        timelimit (int): Timelimit.
        memlimit (int): Memlimit.
        callback (function): Callback of return_future; receives
            (pass, (utime, peakmem, detect_error), verdict).

    Returns:
        None

    """
    def _started_cb(task_id):
        """Started callback.

        Close unused file descriptors after the task is started.

        Args:
            task_id (int): Task ID.

        Returns:
            None

        """
        nonlocal infile_fd
        nonlocal outpipe_fd
        os.close(infile_fd)
        os.close(outpipe_fd[1])
        IOLoop.instance().add_handler(outpipe_fd[0], _diff_out, (IOLoop.READ | IOLoop.ERROR))

    def _done_cb(task_id, stat):
        """Done callback.

        Args:
            task_id (int): Task ID.
            stat (dict): Task result.

        Returns:
            None

        """
        nonlocal result_stat
        nonlocal result_pass
        result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
        if (result_pass is not None):
            # Restored '' verdict element (it was stripped, turning the
            # 3-tuple into a 2-tuple and breaking the result contract).
            callback((result_pass, result_stat, ''))

    def _diff_out(evfd, events):
        """Diff the output of the task.

        Args:
            evfd (int): Event file descriptor.
            events (int): Event flags.

        Returns:
            None

        """
        nonlocal outpipe_fd
        nonlocal ansfile
        nonlocal result_stat
        nonlocal result_pass
        end_flag = False
        if (events & IOLoop.READ):
            # Drain the nonblocking pipe and compare chunk by chunk.
            while True:
                try:
                    data = os.read(outpipe_fd[0], 65536)
                except BlockingIOError:
                    break
                ansdata = ansfile.read(len(data))
                if (data != ansdata):
                    result_pass = False
                    end_flag = True
                    break
                if (len(ansdata) == 0):
                    # Output exhausted; pass only if the answer is too.
                    if (len(ansfile.read(1)) == 0):
                        result_pass = True
                    else:
                        result_pass = False
                    end_flag = True
                    break
        if ((events & IOLoop.ERROR) or end_flag):
            if (result_pass is None):
                # Pipe closed without a verdict; check for unread answer data.
                if (len(ansfile.read(1)) == 0):
                    result_pass = True
                else:
                    result_pass = False
            IOLoop.instance().remove_handler(evfd)
            os.close(outpipe_fd[0])
            ansfile.close()
            if (result_stat is not None):
                # Restored '' verdict element here as well.
                callback((result_pass, result_stat, ''))

    (judge_uid, judge_gid) = StdChal.get_restrict_ugid()
    with StackContext(Privilege.fileaccess):
        infile_fd = os.open(in_path, (os.O_RDONLY | os.O_CLOEXEC))
        ansfile = open(ans_path, 'rb')
    outpipe_fd = os.pipe2(os.O_CLOEXEC)
    # Reads from the pipe must never block the IOLoop.
    fcntl.fcntl(outpipe_fd[0], fcntl.F_SETFL, os.O_NONBLOCK)
    result_stat = None
    result_pass = None
    with StackContext(Privilege.fileaccess):
        judge_path = (self.chal_path + ('/run_%d' % judge_uid))
        os.mkdir(judge_path, mode=505)  # 505 == 0o771
        shutil.copyfile(src_path, (judge_path + '/a.out'), follow_symlinks=False)
    with StackContext(Privilege.fullaccess):
        os.chown((judge_path + '/a.out'), judge_uid, judge_gid)
        os.chmod((judge_path + '/a.out'), 320)  # 320 == 0o500
    task_id = PyExt.create_task(exe_path, argv, envp, {0: infile_fd, 1: outpipe_fd[1], 2: outpipe_fd[1]}, ('/home/%d/run_%d' % (self.uniqid, judge_uid)), 'container/standard', judge_uid, judge_gid, timelimit, memlimit, PyExt.RESTRICT_LEVEL_HIGH)
    if (task_id is None):
        os.close(infile_fd)
        os.close(outpipe_fd[0])
        os.close(outpipe_fd[1])
        ansfile.close()
        # Restored '' verdict element here as well.
        callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ''))
    else:
        PyExt.start_task(task_id, _done_cb, _started_cb)
|
@concurrent.return_future
def judge_diff(self, src_path, exe_path, argv, envp, in_path, ans_path, timelimit, memlimit, callback=None):
'Diff judge.\n\n Args:\n src_path (string): Executable source path.\n exe_path (string): Executable or interpreter path in the sandbox.\n argv ([string]): List of arguments.\n envp ([string]): List of environment variables.\n in_path (string): Input file path.\n ans_path (string): Answer file path.\n timelimit (int): Timelimit.\n memlimit (int): Memlimit.\n callback (function): Callback of return_future.\n\n Returns:\n None\n\n '
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal infile_fd
nonlocal outpipe_fd
os.close(infile_fd)
os.close(outpipe_fd[1])
IOLoop.instance().add_handler(outpipe_fd[0], _diff_out, (IOLoop.READ | IOLoop.ERROR))
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_stat
nonlocal result_pass
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
if (result_pass is not None):
callback((result_pass, result_stat, ))
def _diff_out(evfd, events):
'Diff the output of the task.\n\n Args:\n evfd (int): Event file descriptor.\n events (int): Event flags.\n\n Returns:\n None\n\n '
nonlocal outpipe_fd
nonlocal ansfile
nonlocal result_stat
nonlocal result_pass
end_flag = False
if (events & IOLoop.READ):
while True:
try:
data = os.read(outpipe_fd[0], 65536)
except BlockingIOError:
break
ansdata = ansfile.read(len(data))
if (data != ansdata):
result_pass = False
end_flag = True
break
if (len(ansdata) == 0):
if (len(ansfile.read(1)) == 0):
result_pass = True
else:
result_pass = False
end_flag = True
break
if ((events & IOLoop.ERROR) or end_flag):
if (result_pass is None):
if (len(ansfile.read(1)) == 0):
result_pass = True
else:
result_pass = False
IOLoop.instance().remove_handler(evfd)
os.close(outpipe_fd[0])
ansfile.close()
if (result_stat is not None):
callback((result_pass, result_stat, ))
(judge_uid, judge_gid) = StdChal.get_restrict_ugid()
with StackContext(Privilege.fileaccess):
infile_fd = os.open(in_path, (os.O_RDONLY | os.O_CLOEXEC))
ansfile = open(ans_path, 'rb')
outpipe_fd = os.pipe2(os.O_CLOEXEC)
fcntl.fcntl(outpipe_fd[0], fcntl.F_SETFL, os.O_NONBLOCK)
result_stat = None
result_pass = None
with StackContext(Privilege.fileaccess):
judge_path = (self.chal_path + ('/run_%d' % judge_uid))
os.mkdir(judge_path, mode=505)
shutil.copyfile(src_path, (judge_path + '/a.out'), follow_symlinks=False)
with StackContext(Privilege.fullaccess):
os.chown((judge_path + '/a.out'), judge_uid, judge_gid)
os.chmod((judge_path + '/a.out'), 320)
task_id = PyExt.create_task(exe_path, argv, envp, {0: infile_fd, 1: outpipe_fd[1], 2: outpipe_fd[1]}, ('/home/%d/run_%d' % (self.uniqid, judge_uid)), 'container/standard', judge_uid, judge_gid, timelimit, memlimit, PyExt.RESTRICT_LEVEL_HIGH)
if (task_id is None):
os.close(infile_fd)
os.close(outpipe_fd[0])
os.close(outpipe_fd[1])
ansfile.close()
callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ))
else:
PyExt.start_task(task_id, _done_cb, _started_cb)<|docstring|>Diff judge.
Args:
src_path (string): Executable source path.
exe_path (string): Executable or interpreter path in the sandbox.
argv ([string]): List of arguments.
envp ([string]): List of environment variables.
in_path (string): Input file path.
ans_path (string): Answer file path.
timelimit (int): Timelimit.
memlimit (int): Memlimit.
callback (function): Callback of return_future.
Returns:
None<|endoftext|>
|
2e9eff076879b169f4df3d1abb1ceaabe9981050be519d52b25f863312689e02
|
def __init__(self, container_path, build_relpath):
    """Initialize.

    Args:
        container_path (string): Container path.
        build_relpath (string): Relative build path.

    """
    # Absolute build path is the container root joined with the
    # relative build path.
    self.build_path = container_path + build_relpath
    self.container_path = container_path
    self.build_relpath = build_relpath
|
Initialize.
Args:
container_path (string): Container path.
build_relpath (string): Relative build path.
|
StdChal.py
|
__init__
|
pzread/judge
| 25
|
python
|
def __init__(self, container_path, build_relpath):
    """Initialize.

    Args:
        container_path (string): Container path.
        build_relpath (string): Relative build path.

    """
    # Absolute build path is the container root joined with the
    # relative build path.
    self.build_path = container_path + build_relpath
    self.container_path = container_path
    self.build_relpath = build_relpath
|
def __init__(self, container_path, build_relpath):
'Initialize.\n\n Args:\n container_path (string): Container path.\n build_relpath (string): Relative build path.\n\n '
self.container_path = container_path
self.build_relpath = build_relpath
self.build_path = (container_path + build_relpath)<|docstring|>Initialize.
Args:
container_path (string): Container path.
build_relpath (string): Relative build path.<|endoftext|>
|
802c6f1eb45243f100535da52dfd548192840597fd77034f199e3b02290cf0a7
|
@concurrent.return_future
def build(self, build_ugid, res_path, callback=None):
    """Build environment.

    Copies the problem's check resources into the build directory and, if a
    ``build`` script exists, runs it inside a restricted task.

    Args:
        build_ugid ((int, int)): Build UID/GID.
        res_path (string): Resource path.
        callback (function): Callback of return_future; receives True on
            success, False otherwise.

    Returns:
        None

    """
    def _done_cb(task_id, stat):
        """Done callback.

        Args:
            task_id (int): Task ID.
            stat (dict): Task result.

        Returns:
            None

        """
        if (stat['detect_error'] == PyExt.DETECT_NONE):
            callback(True)
        else:
            callback(False)

    (build_uid, build_gid) = build_ugid
    FileUtils.copydir((res_path + '/check'), self.build_path)
    FileUtils.setperm(self.build_path, build_uid, build_gid)
    with StackContext(Privilege.fullaccess):
        os.chmod(self.build_path, mode=504)  # 504 == 0o770
    with StackContext(Privilege.fileaccess):
        # No build script means there is nothing to do.
        if (not os.path.isfile((self.build_path + '/build'))):
            callback(True)
            return
    with StackContext(Privilege.fullaccess):
        os.chmod((self.build_path + '/build'), mode=504)
    task_id = PyExt.create_task((self.build_relpath + '/build'), [], ['PATH=/usr/bin:/bin', ('TMPDIR=%s' % self.build_relpath), ('HOME=%s' % self.build_relpath), 'LANG=en_US.UTF-8'], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}, self.build_relpath, 'container/standard', build_uid, build_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
    if (task_id is None):
        callback(False)
    else:
        PyExt.start_task(task_id, _done_cb)
|
Build environment.
Args:
build_ugid ((int, int)): Build UID/GID.
res_path (string): Resource path.
callback (function): Callback of return_future.
Returns:
None
|
StdChal.py
|
build
|
pzread/judge
| 25
|
python
|
@concurrent.return_future
def build(self, build_ugid, res_path, callback=None):
    """Build environment.

    Copies the problem's check resources into the build directory and, if a
    ``build`` script exists, runs it inside a restricted task.

    Args:
        build_ugid ((int, int)): Build UID/GID.
        res_path (string): Resource path.
        callback (function): Callback of return_future; receives True on
            success, False otherwise.

    Returns:
        None

    """
    def _done_cb(task_id, stat):
        """Done callback.

        Args:
            task_id (int): Task ID.
            stat (dict): Task result.

        Returns:
            None

        """
        # Success iff the sandbox detected no error.
        callback(stat['detect_error'] == PyExt.DETECT_NONE)

    build_uid, build_gid = build_ugid
    FileUtils.copydir(res_path + '/check', self.build_path)
    FileUtils.setperm(self.build_path, build_uid, build_gid)
    with StackContext(Privilege.fullaccess):
        os.chmod(self.build_path, mode=504)
    with StackContext(Privilege.fileaccess):
        # No build script means there is nothing to do.
        if not os.path.isfile(self.build_path + '/build'):
            callback(True)
            return
    with StackContext(Privilege.fullaccess):
        os.chmod(self.build_path + '/build', mode=504)
    task_id = PyExt.create_task(self.build_relpath + '/build', [], ['PATH=/usr/bin:/bin', 'TMPDIR=%s' % self.build_relpath, 'HOME=%s' % self.build_relpath, 'LANG=en_US.UTF-8'], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}, self.build_relpath, 'container/standard', build_uid, build_gid, 60000, 1024 * 1024 * 1024, PyExt.RESTRICT_LEVEL_LOW)
    if task_id is None:
        callback(False)
    else:
        PyExt.start_task(task_id, _done_cb)
|
@concurrent.return_future
def build(self, build_ugid, res_path, callback=None):
'Build environment.\n\n Args:\n build_ugid ((int, int)): Build UID/GID.\n res_path (string): Resource path.\n callback (function): Callback of return_future.\n\n Returns:\n None\n\n '
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
if (stat['detect_error'] == PyExt.DETECT_NONE):
callback(True)
else:
callback(False)
(build_uid, build_gid) = build_ugid
FileUtils.copydir((res_path + '/check'), self.build_path)
FileUtils.setperm(self.build_path, build_uid, build_gid)
with StackContext(Privilege.fullaccess):
os.chmod(self.build_path, mode=504)
with StackContext(Privilege.fileaccess):
if (not os.path.isfile((self.build_path + '/build'))):
callback(True)
return
with StackContext(Privilege.fullaccess):
os.chmod((self.build_path + '/build'), mode=504)
task_id = PyExt.create_task((self.build_relpath + '/build'), [], ['PATH=/usr/bin:/bin', ('TMPDIR=%s' % self.build_relpath), ('HOME=%s' % self.build_relpath), 'LANG=en_US.UTF-8'], {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}, self.build_relpath, 'container/standard', build_uid, build_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
if (task_id is None):
callback(False)
else:
PyExt.start_task(task_id, _done_cb)<|docstring|>Build environment.
Args:
build_ugid ((int, int)): Build UID/GID.
res_path (string): Resource path.
callback (function): Callback of return_future.
Returns:
None<|endoftext|>
|
4b8bb0ad2622263e1a33c3112659664e58238612c8ff2cfc5543855f46fd1010
|
@concurrent.return_future
def judge(self, src_path, exe_relpath, argv, envp, check_ugid, test_ugid, test_relpath, test_param, metadata, callback=None):
'I/O redirect special judge.\n\n Args:\n src_path (string): Executable source path.\n exe_relpath (string): Executable or interpreter path in the sandbox.\n argv ([string]): List of arguments.\n envp ([string]): List of environment variables.\n check_ugid (int, int): Check UID/GID.\n test_ugid (int, int): Test UID/GID.\n test_relpath (string): Test relative path.\n test_param (dict): Test parameters.\n metadata (dict): Metadata.\n callback (function): Callback of return_future.\n\n Returns:\n None\n\n '
def _check_started_cb(task_id):
'Check started callback.\n\n Close unused file descriptors after the check is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal ansfile_fd
nonlocal check_infile_fd
os.close(inpipe_fd[1])
os.close(outpipe_fd[0])
if (ansfile_fd is not None):
os.close(ansfile_fd)
if (check_infile_fd is not None):
os.close(check_infile_fd)
def _test_started_cb(task_id):
'Test started callback.\n\n Close unused file descriptors after the test is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal outfile_fd
nonlocal test_infile_fd
os.close(inpipe_fd[0])
os.close(outpipe_fd[1])
os.close(outfile_fd)
if (test_infile_fd is not None):
os.close(test_infile_fd)
def _done_cb():
'Done callback.'
nonlocal result_stat
nonlocal result_pass
nonlocal verdict_path
if ((result_pass is not None) and (result_stat is not None)):
with StackContext(Privilege.fileaccess):
verfile = open(verdict_path, 'r')
verdict = verfile.read(140)
verfile.close()
callback((result_pass, result_stat, verdict))
return
def _check_done_cb(task_id, stat):
'Check done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_pass
if (stat['detect_error'] == PyExt.DETECT_NONE):
result_pass = True
else:
result_pass = False
_done_cb()
def _test_done_cb(task_id, stat):
'Test done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_stat
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
_done_cb()
result_stat = None
result_pass = None
in_path = test_param['in']
ans_path = test_param['ans']
timelimit = test_param['timelimit']
memlimit = test_param['memlimit']
(check_uid, check_gid) = check_ugid
(test_uid, test_gid) = test_ugid
test_path = (self.container_path + test_relpath)
output_relpath = (test_relpath + '/output.txt')
output_path = (self.container_path + output_relpath)
verdict_relpath = (test_relpath + '/verdict.txt')
verdict_path = (self.container_path + verdict_relpath)
with StackContext(Privilege.fileaccess):
os.mkdir(test_path, mode=505)
shutil.copyfile(src_path, (test_path + '/a.out'), follow_symlinks=False)
with StackContext(Privilege.fullaccess):
os.chown((test_path + '/a.out'), test_uid, test_gid)
os.chmod((test_path + '/a.out'), 320)
with StackContext(Privilege.fileaccess):
try:
check_infile_fd = os.open(in_path, (os.O_RDONLY | os.O_CLOEXEC))
test_infile_fd = os.open(in_path, (os.O_RDONLY | os.O_CLOEXEC))
except (FileNotFoundError, TypeError):
check_infile_fd = None
test_infile_fd = None
try:
ansfile_fd = os.open(ans_path, (os.O_RDONLY | os.O_CLOEXEC))
except (FileNotFoundError, TypeError):
ansfile_fd = None
outfile_fd = os.open(output_path, ((os.O_WRONLY | os.O_CREAT) | os.O_CLOEXEC), mode=256)
os.close(os.open(verdict_path, (os.O_CREAT | os.O_CLOEXEC), mode=416))
with StackContext(Privilege.fullaccess):
os.chown(output_path, check_uid, check_gid)
os.chown(verdict_path, check_uid, check_gid)
inpipe_fd = os.pipe2(os.O_CLOEXEC)
outpipe_fd = os.pipe2(os.O_CLOEXEC)
check_fdmap = {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}
test_fdmap = {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}
if (check_infile_fd is not None):
check_fdmap[metadata['redir_check']['testin']] = check_infile_fd
if (ansfile_fd is not None):
check_fdmap[metadata['redir_check']['ansin']] = ansfile_fd
check_fdmap[metadata['redir_check']['pipein']] = inpipe_fd[1]
check_fdmap[metadata['redir_check']['pipeout']] = outpipe_fd[0]
try:
del check_fdmap[(- 1)]
except KeyError:
pass
if (test_infile_fd is not None):
test_fdmap[metadata['redir_test']['testin']] = test_infile_fd
test_fdmap[metadata['redir_test']['testout']] = outfile_fd
test_fdmap[metadata['redir_test']['pipein']] = inpipe_fd[0]
test_fdmap[metadata['redir_test']['pipeout']] = outpipe_fd[1]
try:
del test_fdmap[(- 1)]
except KeyError:
pass
check_task_id = PyExt.create_task((self.build_relpath + '/check'), [], ['PATH=/usr/bin:/bin', ('HOME=%s' % self.build_relpath), 'LANG=en_US.UTF-8', ('OUTPUT=%s' % output_relpath), ('VERDICT=%s' % verdict_relpath)], check_fdmap, self.build_relpath, self.container_path, check_uid, check_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
if (check_task_id is None):
callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ''))
return
PyExt.start_task(check_task_id, _check_done_cb, _check_started_cb)
test_task_id = PyExt.create_task(exe_relpath, argv, envp, test_fdmap, test_relpath, self.container_path, test_uid, test_gid, timelimit, memlimit, PyExt.RESTRICT_LEVEL_HIGH)
if (test_task_id is None):
callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ''))
return
PyExt.start_task(test_task_id, _test_done_cb, _test_started_cb)
|
I/O redirect special judge.
Args:
src_path (string): Executable source path.
exe_relpath (string): Executable or interpreter path in the sandbox.
argv ([string]): List of arguments.
envp ([string]): List of environment variables.
check_ugid (int, int): Check UID/GID.
test_ugid (int, int): Test UID/GID.
test_relpath (string): Test relative path.
test_param (dict): Test parameters.
metadata (dict): Metadata.
callback (function): Callback of return_future.
Returns:
None
|
StdChal.py
|
judge
|
pzread/judge
| 25
|
python
|
@concurrent.return_future
def judge(self, src_path, exe_relpath, argv, envp, check_ugid, test_ugid, test_relpath, test_param, metadata, callback=None):
'I/O redirect special judge.\n\n Args:\n src_path (string): Executable source path.\n exe_relpath (string): Executable or interpreter path in the sandbox.\n argv ([string]): List of arguments.\n envp ([string]): List of environment variables.\n check_ugid (int, int): Check UID/GID.\n test_ugid (int, int): Test UID/GID.\n test_relpath (string): Test relative path.\n test_param (dict): Test parameters.\n metadata (dict): Metadata.\n callback (function): Callback of return_future.\n\n Returns:\n None\n\n '
def _check_started_cb(task_id):
'Check started callback.\n\n Close unused file descriptors after the check is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal ansfile_fd
nonlocal check_infile_fd
os.close(inpipe_fd[1])
os.close(outpipe_fd[0])
if (ansfile_fd is not None):
os.close(ansfile_fd)
if (check_infile_fd is not None):
os.close(check_infile_fd)
def _test_started_cb(task_id):
'Test started callback.\n\n Close unused file descriptors after the test is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal outfile_fd
nonlocal test_infile_fd
os.close(inpipe_fd[0])
os.close(outpipe_fd[1])
os.close(outfile_fd)
if (test_infile_fd is not None):
os.close(test_infile_fd)
def _done_cb():
'Done callback.'
nonlocal result_stat
nonlocal result_pass
nonlocal verdict_path
if ((result_pass is not None) and (result_stat is not None)):
with StackContext(Privilege.fileaccess):
verfile = open(verdict_path, 'r')
verdict = verfile.read(140)
verfile.close()
callback((result_pass, result_stat, verdict))
return
def _check_done_cb(task_id, stat):
'Check done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_pass
if (stat['detect_error'] == PyExt.DETECT_NONE):
result_pass = True
else:
result_pass = False
_done_cb()
def _test_done_cb(task_id, stat):
'Test done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_stat
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
_done_cb()
result_stat = None
result_pass = None
in_path = test_param['in']
ans_path = test_param['ans']
timelimit = test_param['timelimit']
memlimit = test_param['memlimit']
(check_uid, check_gid) = check_ugid
(test_uid, test_gid) = test_ugid
test_path = (self.container_path + test_relpath)
output_relpath = (test_relpath + '/output.txt')
output_path = (self.container_path + output_relpath)
verdict_relpath = (test_relpath + '/verdict.txt')
verdict_path = (self.container_path + verdict_relpath)
with StackContext(Privilege.fileaccess):
os.mkdir(test_path, mode=505)
shutil.copyfile(src_path, (test_path + '/a.out'), follow_symlinks=False)
with StackContext(Privilege.fullaccess):
os.chown((test_path + '/a.out'), test_uid, test_gid)
os.chmod((test_path + '/a.out'), 320)
with StackContext(Privilege.fileaccess):
try:
check_infile_fd = os.open(in_path, (os.O_RDONLY | os.O_CLOEXEC))
test_infile_fd = os.open(in_path, (os.O_RDONLY | os.O_CLOEXEC))
except (FileNotFoundError, TypeError):
check_infile_fd = None
test_infile_fd = None
try:
ansfile_fd = os.open(ans_path, (os.O_RDONLY | os.O_CLOEXEC))
except (FileNotFoundError, TypeError):
ansfile_fd = None
outfile_fd = os.open(output_path, ((os.O_WRONLY | os.O_CREAT) | os.O_CLOEXEC), mode=256)
os.close(os.open(verdict_path, (os.O_CREAT | os.O_CLOEXEC), mode=416))
with StackContext(Privilege.fullaccess):
os.chown(output_path, check_uid, check_gid)
os.chown(verdict_path, check_uid, check_gid)
inpipe_fd = os.pipe2(os.O_CLOEXEC)
outpipe_fd = os.pipe2(os.O_CLOEXEC)
check_fdmap = {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}
test_fdmap = {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}
if (check_infile_fd is not None):
check_fdmap[metadata['redir_check']['testin']] = check_infile_fd
if (ansfile_fd is not None):
check_fdmap[metadata['redir_check']['ansin']] = ansfile_fd
check_fdmap[metadata['redir_check']['pipein']] = inpipe_fd[1]
check_fdmap[metadata['redir_check']['pipeout']] = outpipe_fd[0]
try:
del check_fdmap[(- 1)]
except KeyError:
pass
if (test_infile_fd is not None):
test_fdmap[metadata['redir_test']['testin']] = test_infile_fd
test_fdmap[metadata['redir_test']['testout']] = outfile_fd
test_fdmap[metadata['redir_test']['pipein']] = inpipe_fd[0]
test_fdmap[metadata['redir_test']['pipeout']] = outpipe_fd[1]
try:
del test_fdmap[(- 1)]
except KeyError:
pass
check_task_id = PyExt.create_task((self.build_relpath + '/check'), [], ['PATH=/usr/bin:/bin', ('HOME=%s' % self.build_relpath), 'LANG=en_US.UTF-8', ('OUTPUT=%s' % output_relpath), ('VERDICT=%s' % verdict_relpath)], check_fdmap, self.build_relpath, self.container_path, check_uid, check_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
if (check_task_id is None):
callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ))
return
PyExt.start_task(check_task_id, _check_done_cb, _check_started_cb)
test_task_id = PyExt.create_task(exe_relpath, argv, envp, test_fdmap, test_relpath, self.container_path, test_uid, test_gid, timelimit, memlimit, PyExt.RESTRICT_LEVEL_HIGH)
if (test_task_id is None):
callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ))
return
PyExt.start_task(test_task_id, _test_done_cb, _test_started_cb)
|
@concurrent.return_future
def judge(self, src_path, exe_relpath, argv, envp, check_ugid, test_ugid, test_relpath, test_param, metadata, callback=None):
'I/O redirect special judge.\n\n Args:\n src_path (string): Executable source path.\n exe_relpath (string): Executable or interpreter path in the sandbox.\n argv ([string]): List of arguments.\n envp ([string]): List of environment variables.\n check_ugid (int, int): Check UID/GID.\n test_ugid (int, int): Test UID/GID.\n test_relpath (string): Test relative path.\n test_param (dict): Test parameters.\n metadata (dict): Metadata.\n callback (function): Callback of return_future.\n\n Returns:\n None\n\n '
def _check_started_cb(task_id):
'Check started callback.\n\n Close unused file descriptors after the check is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal ansfile_fd
nonlocal check_infile_fd
os.close(inpipe_fd[1])
os.close(outpipe_fd[0])
if (ansfile_fd is not None):
os.close(ansfile_fd)
if (check_infile_fd is not None):
os.close(check_infile_fd)
def _test_started_cb(task_id):
'Test started callback.\n\n Close unused file descriptors after the test is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal outfile_fd
nonlocal test_infile_fd
os.close(inpipe_fd[0])
os.close(outpipe_fd[1])
os.close(outfile_fd)
if (test_infile_fd is not None):
os.close(test_infile_fd)
def _done_cb():
'Done callback.'
nonlocal result_stat
nonlocal result_pass
nonlocal verdict_path
if ((result_pass is not None) and (result_stat is not None)):
with StackContext(Privilege.fileaccess):
verfile = open(verdict_path, 'r')
verdict = verfile.read(140)
verfile.close()
callback((result_pass, result_stat, verdict))
return
def _check_done_cb(task_id, stat):
'Check done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_pass
if (stat['detect_error'] == PyExt.DETECT_NONE):
result_pass = True
else:
result_pass = False
_done_cb()
def _test_done_cb(task_id, stat):
'Test done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_stat
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
_done_cb()
result_stat = None
result_pass = None
in_path = test_param['in']
ans_path = test_param['ans']
timelimit = test_param['timelimit']
memlimit = test_param['memlimit']
(check_uid, check_gid) = check_ugid
(test_uid, test_gid) = test_ugid
test_path = (self.container_path + test_relpath)
output_relpath = (test_relpath + '/output.txt')
output_path = (self.container_path + output_relpath)
verdict_relpath = (test_relpath + '/verdict.txt')
verdict_path = (self.container_path + verdict_relpath)
with StackContext(Privilege.fileaccess):
os.mkdir(test_path, mode=505)
shutil.copyfile(src_path, (test_path + '/a.out'), follow_symlinks=False)
with StackContext(Privilege.fullaccess):
os.chown((test_path + '/a.out'), test_uid, test_gid)
os.chmod((test_path + '/a.out'), 320)
with StackContext(Privilege.fileaccess):
try:
check_infile_fd = os.open(in_path, (os.O_RDONLY | os.O_CLOEXEC))
test_infile_fd = os.open(in_path, (os.O_RDONLY | os.O_CLOEXEC))
except (FileNotFoundError, TypeError):
check_infile_fd = None
test_infile_fd = None
try:
ansfile_fd = os.open(ans_path, (os.O_RDONLY | os.O_CLOEXEC))
except (FileNotFoundError, TypeError):
ansfile_fd = None
outfile_fd = os.open(output_path, ((os.O_WRONLY | os.O_CREAT) | os.O_CLOEXEC), mode=256)
os.close(os.open(verdict_path, (os.O_CREAT | os.O_CLOEXEC), mode=416))
with StackContext(Privilege.fullaccess):
os.chown(output_path, check_uid, check_gid)
os.chown(verdict_path, check_uid, check_gid)
inpipe_fd = os.pipe2(os.O_CLOEXEC)
outpipe_fd = os.pipe2(os.O_CLOEXEC)
check_fdmap = {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}
test_fdmap = {0: StdChal.null_fd, 1: StdChal.null_fd, 2: StdChal.null_fd}
if (check_infile_fd is not None):
check_fdmap[metadata['redir_check']['testin']] = check_infile_fd
if (ansfile_fd is not None):
check_fdmap[metadata['redir_check']['ansin']] = ansfile_fd
check_fdmap[metadata['redir_check']['pipein']] = inpipe_fd[1]
check_fdmap[metadata['redir_check']['pipeout']] = outpipe_fd[0]
try:
del check_fdmap[(- 1)]
except KeyError:
pass
if (test_infile_fd is not None):
test_fdmap[metadata['redir_test']['testin']] = test_infile_fd
test_fdmap[metadata['redir_test']['testout']] = outfile_fd
test_fdmap[metadata['redir_test']['pipein']] = inpipe_fd[0]
test_fdmap[metadata['redir_test']['pipeout']] = outpipe_fd[1]
try:
del test_fdmap[(- 1)]
except KeyError:
pass
check_task_id = PyExt.create_task((self.build_relpath + '/check'), [], ['PATH=/usr/bin:/bin', ('HOME=%s' % self.build_relpath), 'LANG=en_US.UTF-8', ('OUTPUT=%s' % output_relpath), ('VERDICT=%s' % verdict_relpath)], check_fdmap, self.build_relpath, self.container_path, check_uid, check_gid, 60000, ((1024 * 1024) * 1024), PyExt.RESTRICT_LEVEL_LOW)
if (check_task_id is None):
callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ))
return
PyExt.start_task(check_task_id, _check_done_cb, _check_started_cb)
test_task_id = PyExt.create_task(exe_relpath, argv, envp, test_fdmap, test_relpath, self.container_path, test_uid, test_gid, timelimit, memlimit, PyExt.RESTRICT_LEVEL_HIGH)
if (test_task_id is None):
callback((False, (0, 0, PyExt.DETECT_INTERNALERR), ))
return
PyExt.start_task(test_task_id, _test_done_cb, _test_started_cb)<|docstring|>I/O redirect special judge.
Args:
src_path (string): Executable source path.
exe_relpath (string): Executable or interpreter path in the sandbox.
argv ([string]): List of arguments.
envp ([string]): List of environment variables.
check_ugid (int, int): Check UID/GID.
test_ugid (int, int): Test UID/GID.
test_relpath (string): Test relative path.
test_param (dict): Test parameters.
metadata (dict): Metadata.
callback (function): Callback of return_future.
Returns:
None<|endoftext|>
|
c879d4faa1c5c98bc36da68faca44aaa24f6f40a8668d7c5236b96e956f91738
|
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal errpipe_fd
os.close(errpipe_fd)
|
Started callback.
Close unused file descriptors after the task is started.
Args:
task_id (int): Task ID.
Returns:
None
|
StdChal.py
|
_started_cb
|
pzread/judge
| 25
|
python
|
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal errpipe_fd
os.close(errpipe_fd)
|
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal errpipe_fd
os.close(errpipe_fd)<|docstring|>Started callback.
Close unused file descriptors after the task is started.
Args:
task_id (int): Task ID.
Returns:
None<|endoftext|>
|
719f9aa1b7e1dd4290486a20139340b9550e9cd52ac1714d163438a97baa97bd
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal compile_path
with StackContext(Privilege.fileaccess):
verfile = open((compile_path + '/verdict.txt'), 'rb')
verdict = ''.join((chr(c) for c in verfile.read(140)))
verfile.close()
callback((stat['detect_error'], verdict))
|
Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
|
StdChal.py
|
_done_cb
|
pzread/judge
| 25
|
python
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal compile_path
with StackContext(Privilege.fileaccess):
verfile = open((compile_path + '/verdict.txt'), 'rb')
verdict = .join((chr(c) for c in verfile.read(140)))
verfile.close()
callback((stat['detect_error'], verdict))
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal compile_path
with StackContext(Privilege.fileaccess):
verfile = open((compile_path + '/verdict.txt'), 'rb')
verdict = .join((chr(c) for c in verfile.read(140)))
verfile.close()
callback((stat['detect_error'], verdict))<|docstring|>Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None<|endoftext|>
|
d97be3e2dd2e86c8a8548f40fe10e9bfac9a684fac3e91317edc303b43db3e29
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
callback((stat['detect_error'], ''))
|
Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
|
StdChal.py
|
_done_cb
|
pzread/judge
| 25
|
python
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
callback((stat['detect_error'], ))
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
callback((stat['detect_error'], ))<|docstring|>Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None<|endoftext|>
|
c879d4faa1c5c98bc36da68faca44aaa24f6f40a8668d7c5236b96e956f91738
|
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal errpipe_fd
os.close(errpipe_fd)
|
Started callback.
Close unused file descriptors after the task is started.
Args:
task_id (int): Task ID.
Returns:
None
|
StdChal.py
|
_started_cb
|
pzread/judge
| 25
|
python
|
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal errpipe_fd
os.close(errpipe_fd)
|
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal errpipe_fd
os.close(errpipe_fd)<|docstring|>Started callback.
Close unused file descriptors after the task is started.
Args:
task_id (int): Task ID.
Returns:
None<|endoftext|>
|
719f9aa1b7e1dd4290486a20139340b9550e9cd52ac1714d163438a97baa97bd
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal compile_path
with StackContext(Privilege.fileaccess):
verfile = open((compile_path + '/verdict.txt'), 'rb')
verdict = ''.join((chr(c) for c in verfile.read(140)))
verfile.close()
callback((stat['detect_error'], verdict))
|
Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
|
StdChal.py
|
_done_cb
|
pzread/judge
| 25
|
python
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal compile_path
with StackContext(Privilege.fileaccess):
verfile = open((compile_path + '/verdict.txt'), 'rb')
verdict = .join((chr(c) for c in verfile.read(140)))
verfile.close()
callback((stat['detect_error'], verdict))
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal compile_path
with StackContext(Privilege.fileaccess):
verfile = open((compile_path + '/verdict.txt'), 'rb')
verdict = .join((chr(c) for c in verfile.read(140)))
verfile.close()
callback((stat['detect_error'], verdict))<|docstring|>Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None<|endoftext|>
|
4db65e7f7eac884d7360b40fc10f15058b32f5fb25979fa4596b869cf16f0238
|
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal infile_fd
nonlocal outpipe_fd
os.close(infile_fd)
os.close(outpipe_fd[1])
IOLoop.instance().add_handler(outpipe_fd[0], _diff_out, (IOLoop.READ | IOLoop.ERROR))
|
Started callback.
Close unused file descriptors after the task is started.
Args:
task_id (int): Task ID.
Returns:
None
|
StdChal.py
|
_started_cb
|
pzread/judge
| 25
|
python
|
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal infile_fd
nonlocal outpipe_fd
os.close(infile_fd)
os.close(outpipe_fd[1])
IOLoop.instance().add_handler(outpipe_fd[0], _diff_out, (IOLoop.READ | IOLoop.ERROR))
|
def _started_cb(task_id):
'Started callback.\n\n Close unused file descriptors after the task is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal infile_fd
nonlocal outpipe_fd
os.close(infile_fd)
os.close(outpipe_fd[1])
IOLoop.instance().add_handler(outpipe_fd[0], _diff_out, (IOLoop.READ | IOLoop.ERROR))<|docstring|>Started callback.
Close unused file descriptors after the task is started.
Args:
task_id (int): Task ID.
Returns:
None<|endoftext|>
|
9c3fcba8e1f7fd075b76c9ae2e0d163e9c48f5116a6eca8cfa7c577abf3f8ced
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_stat
nonlocal result_pass
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
if (result_pass is not None):
callback((result_pass, result_stat, ''))
|
Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
|
StdChal.py
|
_done_cb
|
pzread/judge
| 25
|
python
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_stat
nonlocal result_pass
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
if (result_pass is not None):
callback((result_pass, result_stat, ))
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_stat
nonlocal result_pass
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
if (result_pass is not None):
callback((result_pass, result_stat, ))<|docstring|>Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None<|endoftext|>
|
5f93d67c4a0ea63ce1b7baa9562a4a7a1706edbb4a8f5cdc50f58da442c5e2d4
|
def _diff_out(evfd, events):
'Diff the output of the task.\n\n Args:\n evfd (int): Event file descriptor.\n events (int): Event flags.\n\n Returns:\n None\n\n '
nonlocal outpipe_fd
nonlocal ansfile
nonlocal result_stat
nonlocal result_pass
end_flag = False
if (events & IOLoop.READ):
while True:
try:
data = os.read(outpipe_fd[0], 65536)
except BlockingIOError:
break
ansdata = ansfile.read(len(data))
if (data != ansdata):
result_pass = False
end_flag = True
break
if (len(ansdata) == 0):
if (len(ansfile.read(1)) == 0):
result_pass = True
else:
result_pass = False
end_flag = True
break
if ((events & IOLoop.ERROR) or end_flag):
if (result_pass is None):
if (len(ansfile.read(1)) == 0):
result_pass = True
else:
result_pass = False
IOLoop.instance().remove_handler(evfd)
os.close(outpipe_fd[0])
ansfile.close()
if (result_stat is not None):
callback((result_pass, result_stat, ''))
|
Diff the output of the task.
Args:
evfd (int): Event file descriptor.
events (int): Event flags.
Returns:
None
|
StdChal.py
|
_diff_out
|
pzread/judge
| 25
|
python
|
def _diff_out(evfd, events):
'Diff the output of the task.\n\n Args:\n evfd (int): Event file descriptor.\n events (int): Event flags.\n\n Returns:\n None\n\n '
nonlocal outpipe_fd
nonlocal ansfile
nonlocal result_stat
nonlocal result_pass
end_flag = False
if (events & IOLoop.READ):
while True:
try:
data = os.read(outpipe_fd[0], 65536)
except BlockingIOError:
break
ansdata = ansfile.read(len(data))
if (data != ansdata):
result_pass = False
end_flag = True
break
if (len(ansdata) == 0):
if (len(ansfile.read(1)) == 0):
result_pass = True
else:
result_pass = False
end_flag = True
break
if ((events & IOLoop.ERROR) or end_flag):
if (result_pass is None):
if (len(ansfile.read(1)) == 0):
result_pass = True
else:
result_pass = False
IOLoop.instance().remove_handler(evfd)
os.close(outpipe_fd[0])
ansfile.close()
if (result_stat is not None):
callback((result_pass, result_stat, ))
|
def _diff_out(evfd, events):
'Diff the output of the task.\n\n Args:\n evfd (int): Event file descriptor.\n events (int): Event flags.\n\n Returns:\n None\n\n '
nonlocal outpipe_fd
nonlocal ansfile
nonlocal result_stat
nonlocal result_pass
end_flag = False
if (events & IOLoop.READ):
while True:
try:
data = os.read(outpipe_fd[0], 65536)
except BlockingIOError:
break
ansdata = ansfile.read(len(data))
if (data != ansdata):
result_pass = False
end_flag = True
break
if (len(ansdata) == 0):
if (len(ansfile.read(1)) == 0):
result_pass = True
else:
result_pass = False
end_flag = True
break
if ((events & IOLoop.ERROR) or end_flag):
if (result_pass is None):
if (len(ansfile.read(1)) == 0):
result_pass = True
else:
result_pass = False
IOLoop.instance().remove_handler(evfd)
os.close(outpipe_fd[0])
ansfile.close()
if (result_stat is not None):
callback((result_pass, result_stat, ))<|docstring|>Diff the output of the task.
Args:
evfd (int): Event file descriptor.
events (int): Event flags.
Returns:
None<|endoftext|>
|
864bc20540473d02ee0cf0122f15e77aab03f4d594ae8200a775af8be6a3701f
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
if (stat['detect_error'] == PyExt.DETECT_NONE):
callback(True)
else:
callback(False)
|
Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
|
StdChal.py
|
_done_cb
|
pzread/judge
| 25
|
python
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
if (stat['detect_error'] == PyExt.DETECT_NONE):
callback(True)
else:
callback(False)
|
def _done_cb(task_id, stat):
'Done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
if (stat['detect_error'] == PyExt.DETECT_NONE):
callback(True)
else:
callback(False)<|docstring|>Done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None<|endoftext|>
|
127f4b68806d75f225594ddfa2e031a077bde0a357b64075f4fe3b072c6c6648
|
def _check_started_cb(task_id):
'Check started callback.\n\n Close unused file descriptors after the check is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal ansfile_fd
nonlocal check_infile_fd
os.close(inpipe_fd[1])
os.close(outpipe_fd[0])
if (ansfile_fd is not None):
os.close(ansfile_fd)
if (check_infile_fd is not None):
os.close(check_infile_fd)
|
Check started callback.
Close unused file descriptors after the check is started.
Args:
task_id (int): Task ID.
Returns:
None
|
StdChal.py
|
_check_started_cb
|
pzread/judge
| 25
|
python
|
def _check_started_cb(task_id):
'Check started callback.\n\n Close unused file descriptors after the check is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal ansfile_fd
nonlocal check_infile_fd
os.close(inpipe_fd[1])
os.close(outpipe_fd[0])
if (ansfile_fd is not None):
os.close(ansfile_fd)
if (check_infile_fd is not None):
os.close(check_infile_fd)
|
def _check_started_cb(task_id):
'Check started callback.\n\n Close unused file descriptors after the check is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal ansfile_fd
nonlocal check_infile_fd
os.close(inpipe_fd[1])
os.close(outpipe_fd[0])
if (ansfile_fd is not None):
os.close(ansfile_fd)
if (check_infile_fd is not None):
os.close(check_infile_fd)<|docstring|>Check started callback.
Close unused file descriptors after the check is started.
Args:
task_id (int): Task ID.
Returns:
None<|endoftext|>
|
004e297cb5a04f38ba87df3e5410f64d55ba4dc6dc40ad4ac6d7a07c6209c061
|
def _test_started_cb(task_id):
'Test started callback.\n\n Close unused file descriptors after the test is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal outfile_fd
nonlocal test_infile_fd
os.close(inpipe_fd[0])
os.close(outpipe_fd[1])
os.close(outfile_fd)
if (test_infile_fd is not None):
os.close(test_infile_fd)
|
Test started callback.
Close unused file descriptors after the test is started.
Args:
task_id (int): Task ID.
Returns:
None
|
StdChal.py
|
_test_started_cb
|
pzread/judge
| 25
|
python
|
def _test_started_cb(task_id):
'Test started callback.\n\n Close unused file descriptors after the test is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal outfile_fd
nonlocal test_infile_fd
os.close(inpipe_fd[0])
os.close(outpipe_fd[1])
os.close(outfile_fd)
if (test_infile_fd is not None):
os.close(test_infile_fd)
|
def _test_started_cb(task_id):
'Test started callback.\n\n Close unused file descriptors after the test is started.\n\n Args:\n task_id (int): Task ID.\n\n Returns:\n None\n\n '
nonlocal inpipe_fd
nonlocal outpipe_fd
nonlocal outfile_fd
nonlocal test_infile_fd
os.close(inpipe_fd[0])
os.close(outpipe_fd[1])
os.close(outfile_fd)
if (test_infile_fd is not None):
os.close(test_infile_fd)<|docstring|>Test started callback.
Close unused file descriptors after the test is started.
Args:
task_id (int): Task ID.
Returns:
None<|endoftext|>
|
b6545c6ba7b4d51c246b7c4d012d3cdd5e3c94d39c2735c2e2f59510248d7d5f
|
def _done_cb():
'Done callback.'
nonlocal result_stat
nonlocal result_pass
nonlocal verdict_path
if ((result_pass is not None) and (result_stat is not None)):
with StackContext(Privilege.fileaccess):
verfile = open(verdict_path, 'r')
verdict = verfile.read(140)
verfile.close()
callback((result_pass, result_stat, verdict))
return
|
Done callback.
|
StdChal.py
|
_done_cb
|
pzread/judge
| 25
|
python
|
def _done_cb():
nonlocal result_stat
nonlocal result_pass
nonlocal verdict_path
if ((result_pass is not None) and (result_stat is not None)):
with StackContext(Privilege.fileaccess):
verfile = open(verdict_path, 'r')
verdict = verfile.read(140)
verfile.close()
callback((result_pass, result_stat, verdict))
return
|
def _done_cb():
nonlocal result_stat
nonlocal result_pass
nonlocal verdict_path
if ((result_pass is not None) and (result_stat is not None)):
with StackContext(Privilege.fileaccess):
verfile = open(verdict_path, 'r')
verdict = verfile.read(140)
verfile.close()
callback((result_pass, result_stat, verdict))
return<|docstring|>Done callback.<|endoftext|>
|
cd67fdc1848092bd880506df5eefaa6559e4ffce9d7eb8e3ddd45abda134d426
|
def _check_done_cb(task_id, stat):
'Check done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_pass
if (stat['detect_error'] == PyExt.DETECT_NONE):
result_pass = True
else:
result_pass = False
_done_cb()
|
Check done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
|
StdChal.py
|
_check_done_cb
|
pzread/judge
| 25
|
python
|
def _check_done_cb(task_id, stat):
'Check done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_pass
if (stat['detect_error'] == PyExt.DETECT_NONE):
result_pass = True
else:
result_pass = False
_done_cb()
|
def _check_done_cb(task_id, stat):
'Check done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_pass
if (stat['detect_error'] == PyExt.DETECT_NONE):
result_pass = True
else:
result_pass = False
_done_cb()<|docstring|>Check done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None<|endoftext|>
|
ce48c1ea15d4573b53657db641053cc94d891719ed3862c94de1aeab73dc75fa
|
def _test_done_cb(task_id, stat):
'Test done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_stat
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
_done_cb()
|
Test done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None
|
StdChal.py
|
_test_done_cb
|
pzread/judge
| 25
|
python
|
def _test_done_cb(task_id, stat):
'Test done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_stat
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
_done_cb()
|
def _test_done_cb(task_id, stat):
'Test done callback.\n\n Args:\n task_id (int): Task ID.\n stat (dict): Task result.\n\n Returns:\n None\n\n '
nonlocal result_stat
result_stat = (stat['utime'], stat['peakmem'], stat['detect_error'])
_done_cb()<|docstring|>Test done callback.
Args:
task_id (int): Task ID.
stat (dict): Task result.
Returns:
None<|endoftext|>
|
144bca87cffdfa93b920a94889ccfdabb0767527c5596ae2efaf2e08ad3265fe
|
async def serve_sports(request):
'Define an HTTP POST handler for receiving requests from If This Then That.\n\n You may modify this method to change how Cozmo reacts to\n an in-game update from IFTTT.\n '
json_object = (await request.json())
alert_body = json_object['AlertBody']
robot = request.app['robot']
async def read_name():
try:
async with robot.perform_off_charger():
"If necessary, Move Cozmo's Head and Lift to make it easy to see Cozmo's face."
(await robot.get_in_position())
(await robot.play_anim_trigger(cozmo.anim.Triggers.ReactToPokeStartled).wait_for_completed())
(await robot.say_text(alert_body).wait_for_completed())
robot.display_image_file_on_face('../face_images/ifttt_sports.png')
except cozmo.RobotBusy:
cozmo.logger.warning((("Robot was busy so didn't read update: '" + alert_body) + "'"))
asyncio.ensure_future(read_name())
return web.Response(text='OK')
|
Define an HTTP POST handler for receiving requests from If This Then That.
You may modify this method to change how Cozmo reacts to
an in-game update from IFTTT.
|
examples/if_this_then_that/ifttt_sports.py
|
serve_sports
|
zhangjifan5683/cozmo-python-sdk
| 794
|
python
|
async def serve_sports(request):
'Define an HTTP POST handler for receiving requests from If This Then That.\n\n You may modify this method to change how Cozmo reacts to\n an in-game update from IFTTT.\n '
json_object = (await request.json())
alert_body = json_object['AlertBody']
robot = request.app['robot']
async def read_name():
try:
async with robot.perform_off_charger():
"If necessary, Move Cozmo's Head and Lift to make it easy to see Cozmo's face."
(await robot.get_in_position())
(await robot.play_anim_trigger(cozmo.anim.Triggers.ReactToPokeStartled).wait_for_completed())
(await robot.say_text(alert_body).wait_for_completed())
robot.display_image_file_on_face('../face_images/ifttt_sports.png')
except cozmo.RobotBusy:
cozmo.logger.warning((("Robot was busy so didn't read update: '" + alert_body) + "'"))
asyncio.ensure_future(read_name())
return web.Response(text='OK')
|
async def serve_sports(request):
'Define an HTTP POST handler for receiving requests from If This Then That.\n\n You may modify this method to change how Cozmo reacts to\n an in-game update from IFTTT.\n '
json_object = (await request.json())
alert_body = json_object['AlertBody']
robot = request.app['robot']
async def read_name():
try:
async with robot.perform_off_charger():
"If necessary, Move Cozmo's Head and Lift to make it easy to see Cozmo's face."
(await robot.get_in_position())
(await robot.play_anim_trigger(cozmo.anim.Triggers.ReactToPokeStartled).wait_for_completed())
(await robot.say_text(alert_body).wait_for_completed())
robot.display_image_file_on_face('../face_images/ifttt_sports.png')
except cozmo.RobotBusy:
cozmo.logger.warning((("Robot was busy so didn't read update: '" + alert_body) + "'"))
asyncio.ensure_future(read_name())
return web.Response(text='OK')<|docstring|>Define an HTTP POST handler for receiving requests from If This Then That.
You may modify this method to change how Cozmo reacts to
an in-game update from IFTTT.<|endoftext|>
|
db27d071e3a1c38721159873029225f0a91c6e6640f7a0bf92be78bd6e87a00c
|
@classmethod
def from_opt(cls, opt, embeddings):
'Alternate constructor.'
return cls(embeddings, opt.enc_layers, opt.word_vec_size, opt.heads, opt.transformer_ff, (opt.dropout[0] if (type(opt.dropout) is list) else opt.dropout), (opt.attention_dropout[0] if (type(opt.attention_dropout) is list) else opt.attention_dropout), opt.max_relative_positions)
|
Alternate constructor.
|
onmt/encoders/bert.py
|
from_opt
|
SivilTaram/dialogue-utterance-rewriter-pytorch
| 0
|
python
|
@classmethod
def from_opt(cls, opt, embeddings):
return cls(embeddings, opt.enc_layers, opt.word_vec_size, opt.heads, opt.transformer_ff, (opt.dropout[0] if (type(opt.dropout) is list) else opt.dropout), (opt.attention_dropout[0] if (type(opt.attention_dropout) is list) else opt.attention_dropout), opt.max_relative_positions)
|
@classmethod
def from_opt(cls, opt, embeddings):
return cls(embeddings, opt.enc_layers, opt.word_vec_size, opt.heads, opt.transformer_ff, (opt.dropout[0] if (type(opt.dropout) is list) else opt.dropout), (opt.attention_dropout[0] if (type(opt.attention_dropout) is list) else opt.attention_dropout), opt.max_relative_positions)<|docstring|>Alternate constructor.<|endoftext|>
|
8565666ffb9cd1b53c875b4807a2482e88953f16fd08b51373821d0d5cb6a429
|
def forward(self, input_ids, lengths, token_type_ids=None):
'\n Args:\n input_ids (Tensor): ``(seq_len, batch_size, feature_dim)``, padding ids=0\n lengths (Tensor): ``(batch_size)``, record length of sequence\n token_type_ids (seq_len, batch_size): ``(B, S)``, A(0), B(1), pad(0)\n Returns:\n all_encoder_layers (list of Tensor): ``(B, S, H)``, token level\n pooled_output (Tensor): ``(B, H)``, sequence level\n '
emb = self.embeddings(input_ids, token_type_ids)
out = emb.transpose(0, 1).contiguous()
mask = (~ sequence_mask(lengths).unsqueeze(1))
for layer in self.encoder:
out = layer(out, mask)
out = self.layer_norm(out)
return (emb, out.transpose(0, 1).contiguous(), lengths)
|
Args:
input_ids (Tensor): ``(seq_len, batch_size, feature_dim)``, padding ids=0
lengths (Tensor): ``(batch_size)``, record length of sequence
token_type_ids (seq_len, batch_size): ``(B, S)``, A(0), B(1), pad(0)
Returns:
all_encoder_layers (list of Tensor): ``(B, S, H)``, token level
pooled_output (Tensor): ``(B, H)``, sequence level
|
onmt/encoders/bert.py
|
forward
|
SivilTaram/dialogue-utterance-rewriter-pytorch
| 0
|
python
|
def forward(self, input_ids, lengths, token_type_ids=None):
'\n Args:\n input_ids (Tensor): ``(seq_len, batch_size, feature_dim)``, padding ids=0\n lengths (Tensor): ``(batch_size)``, record length of sequence\n token_type_ids (seq_len, batch_size): ``(B, S)``, A(0), B(1), pad(0)\n Returns:\n all_encoder_layers (list of Tensor): ``(B, S, H)``, token level\n pooled_output (Tensor): ``(B, H)``, sequence level\n '
emb = self.embeddings(input_ids, token_type_ids)
out = emb.transpose(0, 1).contiguous()
mask = (~ sequence_mask(lengths).unsqueeze(1))
for layer in self.encoder:
out = layer(out, mask)
out = self.layer_norm(out)
return (emb, out.transpose(0, 1).contiguous(), lengths)
|
def forward(self, input_ids, lengths, token_type_ids=None):
'\n Args:\n input_ids (Tensor): ``(seq_len, batch_size, feature_dim)``, padding ids=0\n lengths (Tensor): ``(batch_size)``, record length of sequence\n token_type_ids (seq_len, batch_size): ``(B, S)``, A(0), B(1), pad(0)\n Returns:\n all_encoder_layers (list of Tensor): ``(B, S, H)``, token level\n pooled_output (Tensor): ``(B, H)``, sequence level\n '
emb = self.embeddings(input_ids, token_type_ids)
out = emb.transpose(0, 1).contiguous()
mask = (~ sequence_mask(lengths).unsqueeze(1))
for layer in self.encoder:
out = layer(out, mask)
out = self.layer_norm(out)
return (emb, out.transpose(0, 1).contiguous(), lengths)<|docstring|>Args:
input_ids (Tensor): ``(seq_len, batch_size, feature_dim)``, padding ids=0
lengths (Tensor): ``(batch_size)``, record length of sequence
token_type_ids (seq_len, batch_size): ``(B, S)``, A(0), B(1), pad(0)
Returns:
all_encoder_layers (list of Tensor): ``(B, S, H)``, token level
pooled_output (Tensor): ``(B, H)``, sequence level<|endoftext|>
|
cf84fd9ad0724ab8301cb774aaf7c417bdbe7c75c85b516adffe225ada09b9d8
|
def __init__(self, hidden_size):
'A pooling block (Linear layer followed by Tanh activation).\n\n Args:\n hidden_size (int): size of hidden layer.\n '
super(BertPooler, self).__init__()
self.dense = nn.Linear(hidden_size, hidden_size)
self.activation_fn = nn.Tanh()
|
A pooling block (Linear layer followed by Tanh activation).
Args:
hidden_size (int): size of hidden layer.
|
onmt/encoders/bert.py
|
__init__
|
SivilTaram/dialogue-utterance-rewriter-pytorch
| 0
|
python
|
def __init__(self, hidden_size):
'A pooling block (Linear layer followed by Tanh activation).\n\n Args:\n hidden_size (int): size of hidden layer.\n '
super(BertPooler, self).__init__()
self.dense = nn.Linear(hidden_size, hidden_size)
self.activation_fn = nn.Tanh()
|
def __init__(self, hidden_size):
'A pooling block (Linear layer followed by Tanh activation).\n\n Args:\n hidden_size (int): size of hidden layer.\n '
super(BertPooler, self).__init__()
self.dense = nn.Linear(hidden_size, hidden_size)
self.activation_fn = nn.Tanh()<|docstring|>A pooling block (Linear layer followed by Tanh activation).
Args:
hidden_size (int): size of hidden layer.<|endoftext|>
|
99c790eddabe046c9e676b8137a8dbd30ff962fc0927d675fd9df198729f4780
|
def forward(self, hidden_states):
"hidden_states[:, 0, :] --> {Linear, Tanh} --> Returns.\n\n Args:\n hidden_states (Tensor): last layer's hidden_states, ``(B, S, H)``\n Returns:\n pooled_output (Tensor): transformed output of last layer's hidden\n "
first_token_tensor = hidden_states[(:, 0, :)]
pooled_output = self.activation_fn(self.dense(first_token_tensor))
return pooled_output
|
hidden_states[:, 0, :] --> {Linear, Tanh} --> Returns.
Args:
hidden_states (Tensor): last layer's hidden_states, ``(B, S, H)``
Returns:
pooled_output (Tensor): transformed output of last layer's hidden
|
onmt/encoders/bert.py
|
forward
|
SivilTaram/dialogue-utterance-rewriter-pytorch
| 0
|
python
|
def forward(self, hidden_states):
"hidden_states[:, 0, :] --> {Linear, Tanh} --> Returns.\n\n Args:\n hidden_states (Tensor): last layer's hidden_states, ``(B, S, H)``\n Returns:\n pooled_output (Tensor): transformed output of last layer's hidden\n "
first_token_tensor = hidden_states[(:, 0, :)]
pooled_output = self.activation_fn(self.dense(first_token_tensor))
return pooled_output
|
def forward(self, hidden_states):
"hidden_states[:, 0, :] --> {Linear, Tanh} --> Returns.\n\n Args:\n hidden_states (Tensor): last layer's hidden_states, ``(B, S, H)``\n Returns:\n pooled_output (Tensor): transformed output of last layer's hidden\n "
first_token_tensor = hidden_states[(:, 0, :)]
pooled_output = self.activation_fn(self.dense(first_token_tensor))
return pooled_output<|docstring|>hidden_states[:, 0, :] --> {Linear, Tanh} --> Returns.
Args:
hidden_states (Tensor): last layer's hidden_states, ``(B, S, H)``
Returns:
pooled_output (Tensor): transformed output of last layer's hidden<|endoftext|>
|
a7cb0e56cef1a597998974d7ed37cfc0b92f68e983b6a81668bcefdcf9be7bd8
|
def for_O():
"printing capital 'O' using for loop"
for row in range(5):
for col in range(5):
if (((col == 0) and (row not in (0, 4))) or ((col == 4) and (row not in (0, 4))) or ((row == 0) and (col in (1, 2, 3))) or ((row == 4) and (col in (1, 2, 3)))):
print('*', end=' ')
else:
print(' ', end=' ')
print()
|
printing capital 'O' using for loop
|
package/alphabets/capital_alphabets/O.py
|
for_O
|
venkateshvsn/patterns
| 0
|
python
|
def for_O():
for row in range(5):
for col in range(5):
if (((col == 0) and (row not in (0, 4))) or ((col == 4) and (row not in (0, 4))) or ((row == 0) and (col in (1, 2, 3))) or ((row == 4) and (col in (1, 2, 3)))):
print('*', end=' ')
else:
print(' ', end=' ')
print()
|
def for_O():
for row in range(5):
for col in range(5):
if (((col == 0) and (row not in (0, 4))) or ((col == 4) and (row not in (0, 4))) or ((row == 0) and (col in (1, 2, 3))) or ((row == 4) and (col in (1, 2, 3)))):
print('*', end=' ')
else:
print(' ', end=' ')
print()<|docstring|>printing capital 'O' using for loop<|endoftext|>
|
cf22b867a100520b48b4b8c9e76bee132e1b3767097831f6f9d0a2ad29547dcf
|
def while_O():
"printing capital 'O' using while loop"
i = 0
while (i < 5):
j = 0
while (j < 5):
if (((j == 0) and (i not in (0, 4))) or ((i == 0) and (j not in (0, 4))) or ((i == 4) and (j not in (0, 4))) or ((j == 4) and (i not in (0, 4)))):
print('*', end=' ')
else:
print(' ', end=' ')
j += 1
i += 1
print()
|
printing capital 'O' using while loop
|
package/alphabets/capital_alphabets/O.py
|
while_O
|
venkateshvsn/patterns
| 0
|
python
|
def while_O():
i = 0
while (i < 5):
j = 0
while (j < 5):
if (((j == 0) and (i not in (0, 4))) or ((i == 0) and (j not in (0, 4))) or ((i == 4) and (j not in (0, 4))) or ((j == 4) and (i not in (0, 4)))):
print('*', end=' ')
else:
print(' ', end=' ')
j += 1
i += 1
print()
|
def while_O():
i = 0
while (i < 5):
j = 0
while (j < 5):
if (((j == 0) and (i not in (0, 4))) or ((i == 0) and (j not in (0, 4))) or ((i == 4) and (j not in (0, 4))) or ((j == 4) and (i not in (0, 4)))):
print('*', end=' ')
else:
print(' ', end=' ')
j += 1
i += 1
print()<|docstring|>printing capital 'O' using while loop<|endoftext|>
|
b4d56132d3b677dd8d3b6ed171009d662171fa1d773dfbaf02aac17458246cb6
|
def create_subst_array(size: int, values: Sequence[float], conc: Union[(float, Sequence[float])]) -> np.ndarray:
'Creates an (ordered) array of values.\n\n Parameters\n ----------\n size : int\n The size of the output array.\n values : Sequence of float\n The values for filling the array. The size must match the size of the\n concentrations. If one concentration is given the value-array must be of size 2.\n conc : float or Sequence of float\n The concentrations of the values. If a single concentration is given\n it is interpreted as the concentration of the first of two values.\n\n Returns\n -------\n array : np.ndarray\n The (ordered) array filled with the given values.\n '
if isinstance(conc, float):
conc = [conc, (1 - conc)]
if (sum(conc) != 1):
raise ValueError('Fractions have to add up to 1!')
sizes = (size * np.array(conc)).astype(np.int64)
sizes[(- 1)] += (size - sum(sizes))
arrays = [np.full(size, val) for (size, val) in zip(sizes, values)]
return np.concatenate(arrays)
|
Creates an (ordered) array of values.
Parameters
----------
size : int
The size of the output array.
values : Sequence of float
The values for filling the array. The size must match the size of the
concentrations. If one concentration is given the value-array must be of size 2.
conc : float or Sequence of float
The concentrations of the values. If a single concentration is given
it is interpreted as the concentration of the first of two values.
Returns
-------
array : np.ndarray
The (ordered) array filled with the given values.
|
cmpy/disorder.py
|
create_subst_array
|
dylanljones/cmpy
| 2
|
python
|
def create_subst_array(size: int, values: Sequence[float], conc: Union[(float, Sequence[float])]) -> np.ndarray:
'Creates an (ordered) array of values.\n\n Parameters\n ----------\n size : int\n The size of the output array.\n values : Sequence of float\n The values for filling the array. The size must match the size of the\n concentrations. If one concentration is given the value-array must be of size 2.\n conc : float or Sequence of float\n The concentrations of the values. If a single concentration is given\n it is interpreted as the concentration of the first of two values.\n\n Returns\n -------\n array : np.ndarray\n The (ordered) array filled with the given values.\n '
if isinstance(conc, float):
conc = [conc, (1 - conc)]
if (sum(conc) != 1):
raise ValueError('Fractions have to add up to 1!')
sizes = (size * np.array(conc)).astype(np.int64)
sizes[(- 1)] += (size - sum(sizes))
arrays = [np.full(size, val) for (size, val) in zip(sizes, values)]
return np.concatenate(arrays)
|
def create_subst_array(size: int, values: Sequence[float], conc: Union[(float, Sequence[float])]) -> np.ndarray:
'Creates an (ordered) array of values.\n\n Parameters\n ----------\n size : int\n The size of the output array.\n values : Sequence of float\n The values for filling the array. The size must match the size of the\n concentrations. If one concentration is given the value-array must be of size 2.\n conc : float or Sequence of float\n The concentrations of the values. If a single concentration is given\n it is interpreted as the concentration of the first of two values.\n\n Returns\n -------\n array : np.ndarray\n The (ordered) array filled with the given values.\n '
if isinstance(conc, float):
conc = [conc, (1 - conc)]
if (sum(conc) != 1):
raise ValueError('Fractions have to add up to 1!')
sizes = (size * np.array(conc)).astype(np.int64)
sizes[(- 1)] += (size - sum(sizes))
arrays = [np.full(size, val) for (size, val) in zip(sizes, values)]
return np.concatenate(arrays)<|docstring|>Creates an (ordered) array of values.
Parameters
----------
size : int
The size of the output array.
values : Sequence of float
The values for filling the array. The size must match the size of the
concentrations. If one concentration is given the value-array must be of size 2.
conc : float or Sequence of float
The concentrations of the values. If a single concentration is given
it is interpreted as the concentration of the first of two values.
Returns
-------
array : np.ndarray
The (ordered) array filled with the given values.<|endoftext|>
|
c6d2ac171396e973751b051b75f072d916241c0cdd5a19a62abe9909b76d6f7f
|
def random_permutations(arr: Sequence[float], size: int, replace: bool=False, seed: int=None):
'Creates (optionally unique) permutations of a given array.\n\n Parameters\n ----------\n arr : (N) np.ndarray\n The input array to permute.\n size : int\n The number of permutations to generate.\n replace : bool, optional\n If `True`, only unique permutations are returned. The default is `True`.\n seed : int, optional\n A optional seed to initialize the random number generator.\n\n Yields\n ------\n perm : (N) np.ndarray\n The permuted array.\n\n Examples\n --------\n >>> a = [0, 0, 1, 1, 1]\n >>> perm = random_permutations(a, size=2, seed=0)\n >>> next(perm)\n array([1, 1, 1, 0, 0])\n >>> next(perm)\n array([0, 1, 1, 1, 0])\n '
rng = np.random.default_rng(seed)
p = np.array(arr)
seen = set()
count = 0
while True:
if (count >= size):
break
rng.shuffle(p)
if (not replace):
phash = hash(p.data.tobytes())
if (phash not in seen):
seen.add(phash)
(yield p)
count += 1
else:
(yield p)
count += 1
|
Creates (optionally unique) permutations of a given array.
Parameters
----------
arr : (N) np.ndarray
The input array to permute.
size : int
The number of permutations to generate.
replace : bool, optional
If `True`, only unique permutations are returned. The default is `True`.
seed : int, optional
A optional seed to initialize the random number generator.
Yields
------
perm : (N) np.ndarray
The permuted array.
Examples
--------
>>> a = [0, 0, 1, 1, 1]
>>> perm = random_permutations(a, size=2, seed=0)
>>> next(perm)
array([1, 1, 1, 0, 0])
>>> next(perm)
array([0, 1, 1, 1, 0])
|
cmpy/disorder.py
|
random_permutations
|
dylanljones/cmpy
| 2
|
python
|
def random_permutations(arr: Sequence[float], size: int, replace: bool=False, seed: int=None):
'Creates (optionally unique) permutations of a given array.\n\n Parameters\n ----------\n arr : (N) np.ndarray\n The input array to permute.\n size : int\n The number of permutations to generate.\n replace : bool, optional\n If `True`, only unique permutations are returned. The default is `True`.\n seed : int, optional\n A optional seed to initialize the random number generator.\n\n Yields\n ------\n perm : (N) np.ndarray\n The permuted array.\n\n Examples\n --------\n >>> a = [0, 0, 1, 1, 1]\n >>> perm = random_permutations(a, size=2, seed=0)\n >>> next(perm)\n array([1, 1, 1, 0, 0])\n >>> next(perm)\n array([0, 1, 1, 1, 0])\n '
rng = np.random.default_rng(seed)
p = np.array(arr)
seen = set()
count = 0
while True:
if (count >= size):
break
rng.shuffle(p)
if (not replace):
phash = hash(p.data.tobytes())
if (phash not in seen):
seen.add(phash)
(yield p)
count += 1
else:
(yield p)
count += 1
|
def random_permutations(arr: Sequence[float], size: int, replace: bool=False, seed: int=None):
'Creates (optionally unique) permutations of a given array.\n\n Parameters\n ----------\n arr : (N) np.ndarray\n The input array to permute.\n size : int\n The number of permutations to generate.\n replace : bool, optional\n If `True`, only unique permutations are returned. The default is `True`.\n seed : int, optional\n A optional seed to initialize the random number generator.\n\n Yields\n ------\n perm : (N) np.ndarray\n The permuted array.\n\n Examples\n --------\n >>> a = [0, 0, 1, 1, 1]\n >>> perm = random_permutations(a, size=2, seed=0)\n >>> next(perm)\n array([1, 1, 1, 0, 0])\n >>> next(perm)\n array([0, 1, 1, 1, 0])\n '
rng = np.random.default_rng(seed)
p = np.array(arr)
seen = set()
count = 0
while True:
if (count >= size):
break
rng.shuffle(p)
if (not replace):
phash = hash(p.data.tobytes())
if (phash not in seen):
seen.add(phash)
(yield p)
count += 1
else:
(yield p)
count += 1<|docstring|>Creates (optionally unique) permutations of a given array.
Parameters
----------
arr : (N) np.ndarray
The input array to permute.
size : int
The number of permutations to generate.
replace : bool, optional
If `True`, only unique permutations are returned. The default is `True`.
seed : int, optional
A optional seed to initialize the random number generator.
Yields
------
perm : (N) np.ndarray
The permuted array.
Examples
--------
>>> a = [0, 0, 1, 1, 1]
>>> perm = random_permutations(a, size=2, seed=0)
>>> next(perm)
array([1, 1, 1, 0, 0])
>>> next(perm)
array([0, 1, 1, 1, 0])<|endoftext|>
|
17438efc9d617d3c0922ed01fb40006b92307c133ddcb92e81e320b2fedbc240
|
def disorder_generator(size: int, values: Sequence[float], conc: Union[(float, Sequence[float])], samples: int, replace: bool=False, seed=None):
'Generates (optionally unique) random samples from a given 1-D array.\n\n See Also\n --------\n random_permutations\n\n Parameters\n ----------\n size : int\n The size of the output array.\n values : Sequence of float\n The values for filling the array. The size must match the size of the\n concentrations. If one concentration is given the value-array must be of size 2.\n conc : float or Sequence of float\n The concentrations of the values. If a single concentration is given\n it is interpreted as the concentration of the first of two values.\n samples : int\n The number of random arrays to generate.\n replace : bool, optional\n If `True`, only unique permutations are returned. The default is `True`.\n seed : int, optional\n A optional seed to initialize the random number generator.\n\n Yields\n ------\n perm : (N) np.ndarray\n The randomly sampled arrays.\n\n Examples\n --------\n >>> eps = disorder_generator(5, values=[0, +1], conc=[0.4, 0.6], samples=2, seed=0)\n >>> next(eps)\n array([1, 1, 1, 0, 0])\n >>> next(eps)\n array([0, 1, 1, 1, 0])\n '
ordered = create_subst_array(size, values, conc)
return random_permutations(ordered, samples, replace, seed)
|
Generates (optionally unique) random samples from a given 1-D array.
See Also
--------
random_permutations
Parameters
----------
size : int
The size of the output array.
values : Sequence of float
The values for filling the array. The size must match the size of the
concentrations. If one concentration is given the value-array must be of size 2.
conc : float or Sequence of float
The concentrations of the values. If a single concentration is given
it is interpreted as the concentration of the first of two values.
samples : int
The number of random arrays to generate.
replace : bool, optional
If `True`, only unique permutations are returned. The default is `True`.
seed : int, optional
A optional seed to initialize the random number generator.
Yields
------
perm : (N) np.ndarray
The randomly sampled arrays.
Examples
--------
>>> eps = disorder_generator(5, values=[0, +1], conc=[0.4, 0.6], samples=2, seed=0)
>>> next(eps)
array([1, 1, 1, 0, 0])
>>> next(eps)
array([0, 1, 1, 1, 0])
|
cmpy/disorder.py
|
disorder_generator
|
dylanljones/cmpy
| 2
|
python
|
def disorder_generator(size: int, values: Sequence[float], conc: Union[(float, Sequence[float])], samples: int, replace: bool=False, seed=None):
'Generates (optionally unique) random samples from a given 1-D array.\n\n See Also\n --------\n random_permutations\n\n Parameters\n ----------\n size : int\n The size of the output array.\n values : Sequence of float\n The values for filling the array. The size must match the size of the\n concentrations. If one concentration is given the value-array must be of size 2.\n conc : float or Sequence of float\n The concentrations of the values. If a single concentration is given\n it is interpreted as the concentration of the first of two values.\n samples : int\n The number of random arrays to generate.\n replace : bool, optional\n If `True`, only unique permutations are returned. The default is `True`.\n seed : int, optional\n A optional seed to initialize the random number generator.\n\n Yields\n ------\n perm : (N) np.ndarray\n The randomly sampled arrays.\n\n Examples\n --------\n >>> eps = disorder_generator(5, values=[0, +1], conc=[0.4, 0.6], samples=2, seed=0)\n >>> next(eps)\n array([1, 1, 1, 0, 0])\n >>> next(eps)\n array([0, 1, 1, 1, 0])\n '
ordered = create_subst_array(size, values, conc)
return random_permutations(ordered, samples, replace, seed)
|
def disorder_generator(size: int, values: Sequence[float], conc: Union[(float, Sequence[float])], samples: int, replace: bool=False, seed=None):
'Generates (optionally unique) random samples from a given 1-D array.\n\n See Also\n --------\n random_permutations\n\n Parameters\n ----------\n size : int\n The size of the output array.\n values : Sequence of float\n The values for filling the array. The size must match the size of the\n concentrations. If one concentration is given the value-array must be of size 2.\n conc : float or Sequence of float\n The concentrations of the values. If a single concentration is given\n it is interpreted as the concentration of the first of two values.\n samples : int\n The number of random arrays to generate.\n replace : bool, optional\n If `True`, only unique permutations are returned. The default is `True`.\n seed : int, optional\n A optional seed to initialize the random number generator.\n\n Yields\n ------\n perm : (N) np.ndarray\n The randomly sampled arrays.\n\n Examples\n --------\n >>> eps = disorder_generator(5, values=[0, +1], conc=[0.4, 0.6], samples=2, seed=0)\n >>> next(eps)\n array([1, 1, 1, 0, 0])\n >>> next(eps)\n array([0, 1, 1, 1, 0])\n '
ordered = create_subst_array(size, values, conc)
return random_permutations(ordered, samples, replace, seed)<|docstring|>Generates (optionally unique) random samples from a given 1-D array.
See Also
--------
random_permutations
Parameters
----------
size : int
The size of the output array.
values : Sequence of float
The values for filling the array. The size must match the size of the
concentrations. If one concentration is given the value-array must be of size 2.
conc : float or Sequence of float
The concentrations of the values. If a single concentration is given
it is interpreted as the concentration of the first of two values.
samples : int
The number of random arrays to generate.
replace : bool, optional
If `True`, only unique permutations are returned. The default is `True`.
seed : int, optional
A optional seed to initialize the random number generator.
Yields
------
perm : (N) np.ndarray
The randomly sampled arrays.
Examples
--------
>>> eps = disorder_generator(5, values=[0, +1], conc=[0.4, 0.6], samples=2, seed=0)
>>> next(eps)
array([1, 1, 1, 0, 0])
>>> next(eps)
array([0, 1, 1, 1, 0])<|endoftext|>
|
f17fb8d7eafed0139e742f16fcfdd73c45714735999ae710b68d1f366ca7c0b5
|
def __init__(self, each_object_size_width=config.TRAIN_OBJECT_WIDTH, each_object_size_height=config.TRAIN_OBJECT_HEIGHT, train_batch=10000, train_steps=800, train_learning_rate=0.5):
'\n Constructor.\n '
self.each_object_size_width = each_object_size_width
self.each_object_size_height = each_object_size_height
self.train_batch = train_batch
self.train_steps = train_steps
self.train_learning_rate = train_learning_rate
helper = TrainingHelper()
self.__print_predications__ = helper.print_predications
self.__print_test_accuracy__ = helper.print_test_accuracy
self.__activation__ = helper.activation
self.__loss_optimizer__ = helper.loss_optimizer
|
Constructor.
|
machine_learning/tf_notMNIST_Training_Gradient_Descent.py
|
__init__
|
XinyueZ/some-python-codes
| 0
|
python
|
def __init__(self, each_object_size_width=config.TRAIN_OBJECT_WIDTH, each_object_size_height=config.TRAIN_OBJECT_HEIGHT, train_batch=10000, train_steps=800, train_learning_rate=0.5):
'\n \n '
self.each_object_size_width = each_object_size_width
self.each_object_size_height = each_object_size_height
self.train_batch = train_batch
self.train_steps = train_steps
self.train_learning_rate = train_learning_rate
helper = TrainingHelper()
self.__print_predications__ = helper.print_predications
self.__print_test_accuracy__ = helper.print_test_accuracy
self.__activation__ = helper.activation
self.__loss_optimizer__ = helper.loss_optimizer
|
def __init__(self, each_object_size_width=config.TRAIN_OBJECT_WIDTH, each_object_size_height=config.TRAIN_OBJECT_HEIGHT, train_batch=10000, train_steps=800, train_learning_rate=0.5):
'\n \n '
self.each_object_size_width = each_object_size_width
self.each_object_size_height = each_object_size_height
self.train_batch = train_batch
self.train_steps = train_steps
self.train_learning_rate = train_learning_rate
helper = TrainingHelper()
self.__print_predications__ = helper.print_predications
self.__print_test_accuracy__ = helper.print_test_accuracy
self.__activation__ = helper.activation
self.__loss_optimizer__ = helper.loss_optimizer<|docstring|>Constructor.<|endoftext|>
|
f732a44ced5f9464fb90eca36b6aea36023098c83c61cb993437ba003db4d2c8
|
def start_with(self, train_dataset, train_labels, valid_dataset, valid_labels, test_dataset, test_labels, count_classes, beta_for_regularizer=0.01):
'\n Start multinomial logistic regression using simple gradient descent.\n '
tf_train_dataset = constant(train_dataset[(:self.train_batch, :)])
tf_train_labels = constant(train_labels[:self.train_batch])
tf_valid_dataset = constant(valid_dataset)
tf_test_dataset = constant(test_dataset)
tf_weights = Variable(truncated_normal([(self.each_object_size_width * self.each_object_size_height), count_classes]))
tf_biases = Variable(zeros([count_classes]))
logits = self.__activation__(tf_train_dataset, tf_weights, tf_biases)
(loss, optimizer) = self.__loss_optimizer__(tf_train_labels, logits, self.train_learning_rate, beta_for_regularizer, [tf_weights])
predication_for_train = tf.nn.softmax(logits)
predication_for_valid = tf.nn.softmax(self.__activation__(tf_valid_dataset, tf_weights, tf_biases))
predication_for_test = tf.nn.softmax(self.__activation__(tf_test_dataset, tf_weights, tf_biases))
print('\n')
with tf.Session() as sess:
init = global_variables_initializer()
sess.run(init)
for step in range(self.train_steps):
(_, ls, predications) = sess.run([optimizer, loss, predication_for_train])
self.__print_predications__(step, ls, predications, train_labels[(:self.train_batch, :)], predication_for_valid, valid_labels)
self.__print_test_accuracy__(predication_for_test, test_labels)
|
Start multinomial logistic regression using simple gradient descent.
|
machine_learning/tf_notMNIST_Training_Gradient_Descent.py
|
start_with
|
XinyueZ/some-python-codes
| 0
|
python
|
def start_with(self, train_dataset, train_labels, valid_dataset, valid_labels, test_dataset, test_labels, count_classes, beta_for_regularizer=0.01):
'\n \n '
tf_train_dataset = constant(train_dataset[(:self.train_batch, :)])
tf_train_labels = constant(train_labels[:self.train_batch])
tf_valid_dataset = constant(valid_dataset)
tf_test_dataset = constant(test_dataset)
tf_weights = Variable(truncated_normal([(self.each_object_size_width * self.each_object_size_height), count_classes]))
tf_biases = Variable(zeros([count_classes]))
logits = self.__activation__(tf_train_dataset, tf_weights, tf_biases)
(loss, optimizer) = self.__loss_optimizer__(tf_train_labels, logits, self.train_learning_rate, beta_for_regularizer, [tf_weights])
predication_for_train = tf.nn.softmax(logits)
predication_for_valid = tf.nn.softmax(self.__activation__(tf_valid_dataset, tf_weights, tf_biases))
predication_for_test = tf.nn.softmax(self.__activation__(tf_test_dataset, tf_weights, tf_biases))
print('\n')
with tf.Session() as sess:
init = global_variables_initializer()
sess.run(init)
for step in range(self.train_steps):
(_, ls, predications) = sess.run([optimizer, loss, predication_for_train])
self.__print_predications__(step, ls, predications, train_labels[(:self.train_batch, :)], predication_for_valid, valid_labels)
self.__print_test_accuracy__(predication_for_test, test_labels)
|
def start_with(self, train_dataset, train_labels, valid_dataset, valid_labels, test_dataset, test_labels, count_classes, beta_for_regularizer=0.01):
'\n \n '
tf_train_dataset = constant(train_dataset[(:self.train_batch, :)])
tf_train_labels = constant(train_labels[:self.train_batch])
tf_valid_dataset = constant(valid_dataset)
tf_test_dataset = constant(test_dataset)
tf_weights = Variable(truncated_normal([(self.each_object_size_width * self.each_object_size_height), count_classes]))
tf_biases = Variable(zeros([count_classes]))
logits = self.__activation__(tf_train_dataset, tf_weights, tf_biases)
(loss, optimizer) = self.__loss_optimizer__(tf_train_labels, logits, self.train_learning_rate, beta_for_regularizer, [tf_weights])
predication_for_train = tf.nn.softmax(logits)
predication_for_valid = tf.nn.softmax(self.__activation__(tf_valid_dataset, tf_weights, tf_biases))
predication_for_test = tf.nn.softmax(self.__activation__(tf_test_dataset, tf_weights, tf_biases))
print('\n')
with tf.Session() as sess:
init = global_variables_initializer()
sess.run(init)
for step in range(self.train_steps):
(_, ls, predications) = sess.run([optimizer, loss, predication_for_train])
self.__print_predications__(step, ls, predications, train_labels[(:self.train_batch, :)], predication_for_valid, valid_labels)
self.__print_test_accuracy__(predication_for_test, test_labels)<|docstring|>Start multinomial logistic regression using simple gradient descent.<|endoftext|>
|
08b1471372c04f6ca2f96cc67296c4c4f75f8f05464e7a6df6594d28288dab86
|
def validate_args(args):
' Validate all of the arguments parsed.\n Args:\n args (argparser.ArgumentParser) : Args parsed by the argument parser.\n Returns:\n args (CoreclrArguments) : Args parsed\n Notes:\n If the arguments are valid then return them all in a tuple. If not,\n raise an exception stating x argument is incorrect.\n '
coreclr_setup_args = CoreclrArguments(args, require_built_test_dir=False, require_built_core_root=True, require_built_product_dir=False)
coreclr_setup_args.verify(args, 'base_root', (lambda directory: (os.path.isdir(directory) if (directory is not None) else True)), 'Base root is not a valid directory')
coreclr_setup_args.verify(args, 'diff_root', (lambda directory: (os.path.isdir(directory) if (directory is not None) else True)), 'Diff root is not a valid directory', modify_arg=(lambda directory: (nth_dirname(os.path.abspath(sys.argv[0]), 3) if (directory is None) else os.path.abspath(directory))))
coreclr_setup_args.verify(args, 'scratch_root', (lambda unused: True), 'Error setting scratch_root', modify_arg=(lambda directory: (os.path.join(coreclr_setup_args.diff_root, '_', 'pmi') if (directory is None) else os.path.abspath(directory))))
coreclr_setup_args.verify(args, 'skip_baseline_build', (lambda unused: True), 'Error setting baseline build')
coreclr_setup_args.verify(args, 'skip_diffs', (lambda unused: True), 'Error setting skip_diffs')
coreclr_setup_args.verify(args, 'target_branch', (lambda unused: True), 'Error setting target_branch')
coreclr_setup_args.verify(args, 'commit_hash', (lambda unused: True), 'Error setting commit_hash')
coreclr_setup_args.verify(args, 'ci_arch', (lambda ci_arch: (ci_arch in (coreclr_setup_args.valid_arches + ['x86_arm_altjit', 'x64_arm64_altjit']))), 'Error setting ci_arch')
args = (coreclr_setup_args.arch, coreclr_setup_args.ci_arch, coreclr_setup_args.build_type, coreclr_setup_args.base_root, coreclr_setup_args.diff_root, coreclr_setup_args.scratch_root, coreclr_setup_args.skip_baseline_build, coreclr_setup_args.skip_diffs, coreclr_setup_args.target_branch, coreclr_setup_args.commit_hash)
log('Configuration:')
log((' arch: %s' % coreclr_setup_args.arch))
log((' ci_arch: %s' % coreclr_setup_args.ci_arch))
log((' build_type: %s' % coreclr_setup_args.build_type))
log((' base_root: %s' % coreclr_setup_args.base_root))
log((' diff_root: %s' % coreclr_setup_args.diff_root))
log((' scratch_root: %s' % coreclr_setup_args.scratch_root))
log((' skip_baseline_build: %s' % coreclr_setup_args.skip_baseline_build))
log((' skip_diffs: %s' % coreclr_setup_args.skip_diffs))
log((' target_branch: %s' % coreclr_setup_args.target_branch))
log((' commit_hash: %s' % coreclr_setup_args.commit_hash))
return args
|
Validate all of the arguments parsed.
Args:
args (argparser.ArgumentParser) : Args parsed by the argument parser.
Returns:
args (CoreclrArguments) : Args parsed
Notes:
If the arguments are valid then return them all in a tuple. If not,
raise an exception stating x argument is incorrect.
|
src/tests/Common/scripts/run-pmi-diffs.py
|
validate_args
|
DarkBullNull/runtime
| 9,402
|
python
|
def validate_args(args):
' Validate all of the arguments parsed.\n Args:\n args (argparser.ArgumentParser) : Args parsed by the argument parser.\n Returns:\n args (CoreclrArguments) : Args parsed\n Notes:\n If the arguments are valid then return them all in a tuple. If not,\n raise an exception stating x argument is incorrect.\n '
coreclr_setup_args = CoreclrArguments(args, require_built_test_dir=False, require_built_core_root=True, require_built_product_dir=False)
coreclr_setup_args.verify(args, 'base_root', (lambda directory: (os.path.isdir(directory) if (directory is not None) else True)), 'Base root is not a valid directory')
coreclr_setup_args.verify(args, 'diff_root', (lambda directory: (os.path.isdir(directory) if (directory is not None) else True)), 'Diff root is not a valid directory', modify_arg=(lambda directory: (nth_dirname(os.path.abspath(sys.argv[0]), 3) if (directory is None) else os.path.abspath(directory))))
coreclr_setup_args.verify(args, 'scratch_root', (lambda unused: True), 'Error setting scratch_root', modify_arg=(lambda directory: (os.path.join(coreclr_setup_args.diff_root, '_', 'pmi') if (directory is None) else os.path.abspath(directory))))
coreclr_setup_args.verify(args, 'skip_baseline_build', (lambda unused: True), 'Error setting baseline build')
coreclr_setup_args.verify(args, 'skip_diffs', (lambda unused: True), 'Error setting skip_diffs')
coreclr_setup_args.verify(args, 'target_branch', (lambda unused: True), 'Error setting target_branch')
coreclr_setup_args.verify(args, 'commit_hash', (lambda unused: True), 'Error setting commit_hash')
coreclr_setup_args.verify(args, 'ci_arch', (lambda ci_arch: (ci_arch in (coreclr_setup_args.valid_arches + ['x86_arm_altjit', 'x64_arm64_altjit']))), 'Error setting ci_arch')
args = (coreclr_setup_args.arch, coreclr_setup_args.ci_arch, coreclr_setup_args.build_type, coreclr_setup_args.base_root, coreclr_setup_args.diff_root, coreclr_setup_args.scratch_root, coreclr_setup_args.skip_baseline_build, coreclr_setup_args.skip_diffs, coreclr_setup_args.target_branch, coreclr_setup_args.commit_hash)
log('Configuration:')
log((' arch: %s' % coreclr_setup_args.arch))
log((' ci_arch: %s' % coreclr_setup_args.ci_arch))
log((' build_type: %s' % coreclr_setup_args.build_type))
log((' base_root: %s' % coreclr_setup_args.base_root))
log((' diff_root: %s' % coreclr_setup_args.diff_root))
log((' scratch_root: %s' % coreclr_setup_args.scratch_root))
log((' skip_baseline_build: %s' % coreclr_setup_args.skip_baseline_build))
log((' skip_diffs: %s' % coreclr_setup_args.skip_diffs))
log((' target_branch: %s' % coreclr_setup_args.target_branch))
log((' commit_hash: %s' % coreclr_setup_args.commit_hash))
return args
|
def validate_args(args):
' Validate all of the arguments parsed.\n Args:\n args (argparser.ArgumentParser) : Args parsed by the argument parser.\n Returns:\n args (CoreclrArguments) : Args parsed\n Notes:\n If the arguments are valid then return them all in a tuple. If not,\n raise an exception stating x argument is incorrect.\n '
coreclr_setup_args = CoreclrArguments(args, require_built_test_dir=False, require_built_core_root=True, require_built_product_dir=False)
coreclr_setup_args.verify(args, 'base_root', (lambda directory: (os.path.isdir(directory) if (directory is not None) else True)), 'Base root is not a valid directory')
coreclr_setup_args.verify(args, 'diff_root', (lambda directory: (os.path.isdir(directory) if (directory is not None) else True)), 'Diff root is not a valid directory', modify_arg=(lambda directory: (nth_dirname(os.path.abspath(sys.argv[0]), 3) if (directory is None) else os.path.abspath(directory))))
coreclr_setup_args.verify(args, 'scratch_root', (lambda unused: True), 'Error setting scratch_root', modify_arg=(lambda directory: (os.path.join(coreclr_setup_args.diff_root, '_', 'pmi') if (directory is None) else os.path.abspath(directory))))
coreclr_setup_args.verify(args, 'skip_baseline_build', (lambda unused: True), 'Error setting baseline build')
coreclr_setup_args.verify(args, 'skip_diffs', (lambda unused: True), 'Error setting skip_diffs')
coreclr_setup_args.verify(args, 'target_branch', (lambda unused: True), 'Error setting target_branch')
coreclr_setup_args.verify(args, 'commit_hash', (lambda unused: True), 'Error setting commit_hash')
coreclr_setup_args.verify(args, 'ci_arch', (lambda ci_arch: (ci_arch in (coreclr_setup_args.valid_arches + ['x86_arm_altjit', 'x64_arm64_altjit']))), 'Error setting ci_arch')
args = (coreclr_setup_args.arch, coreclr_setup_args.ci_arch, coreclr_setup_args.build_type, coreclr_setup_args.base_root, coreclr_setup_args.diff_root, coreclr_setup_args.scratch_root, coreclr_setup_args.skip_baseline_build, coreclr_setup_args.skip_diffs, coreclr_setup_args.target_branch, coreclr_setup_args.commit_hash)
log('Configuration:')
log((' arch: %s' % coreclr_setup_args.arch))
log((' ci_arch: %s' % coreclr_setup_args.ci_arch))
log((' build_type: %s' % coreclr_setup_args.build_type))
log((' base_root: %s' % coreclr_setup_args.base_root))
log((' diff_root: %s' % coreclr_setup_args.diff_root))
log((' scratch_root: %s' % coreclr_setup_args.scratch_root))
log((' skip_baseline_build: %s' % coreclr_setup_args.skip_baseline_build))
log((' skip_diffs: %s' % coreclr_setup_args.skip_diffs))
log((' target_branch: %s' % coreclr_setup_args.target_branch))
log((' commit_hash: %s' % coreclr_setup_args.commit_hash))
return args<|docstring|>Validate all of the arguments parsed.
Args:
args (argparser.ArgumentParser) : Args parsed by the argument parser.
Returns:
args (CoreclrArguments) : Args parsed
Notes:
If the arguments are valid then return them all in a tuple. If not,
raise an exception stating x argument is incorrect.<|endoftext|>
|
13987d6e91ce864b15682ca3fdb84b57bcc6b3d5e701bba157bd57baf0053cfe
|
def nth_dirname(path, n):
' Find the Nth parent directory of the given path\n Args:\n path (str): path name containing at least N components\n n (int): num of basenames to remove\n Returns:\n outpath (str): path with the last n components removed\n Notes:\n If n is 0, path is returned unmodified\n '
assert (n >= 0)
for i in range(0, n):
path = os.path.dirname(path)
return path
|
Find the Nth parent directory of the given path
Args:
path (str): path name containing at least N components
n (int): num of basenames to remove
Returns:
outpath (str): path with the last n components removed
Notes:
If n is 0, path is returned unmodified
|
src/tests/Common/scripts/run-pmi-diffs.py
|
nth_dirname
|
DarkBullNull/runtime
| 9,402
|
python
|
def nth_dirname(path, n):
' Find the Nth parent directory of the given path\n Args:\n path (str): path name containing at least N components\n n (int): num of basenames to remove\n Returns:\n outpath (str): path with the last n components removed\n Notes:\n If n is 0, path is returned unmodified\n '
assert (n >= 0)
for i in range(0, n):
path = os.path.dirname(path)
return path
|
def nth_dirname(path, n):
' Find the Nth parent directory of the given path\n Args:\n path (str): path name containing at least N components\n n (int): num of basenames to remove\n Returns:\n outpath (str): path with the last n components removed\n Notes:\n If n is 0, path is returned unmodified\n '
assert (n >= 0)
for i in range(0, n):
path = os.path.dirname(path)
return path<|docstring|>Find the Nth parent directory of the given path
Args:
path (str): path name containing at least N components
n (int): num of basenames to remove
Returns:
outpath (str): path with the last n components removed
Notes:
If n is 0, path is returned unmodified<|endoftext|>
|
587bd0f7c2f20aa6747635b3c68169fa928c139b2b25d1e017d6afc2ab71818f
|
def log(message):
' Print logging information\n Args:\n message (str): message to be printed\n '
print(('[%s]: %s' % (sys.argv[0], message)))
|
Print logging information
Args:
message (str): message to be printed
|
src/tests/Common/scripts/run-pmi-diffs.py
|
log
|
DarkBullNull/runtime
| 9,402
|
python
|
def log(message):
' Print logging information\n Args:\n message (str): message to be printed\n '
print(('[%s]: %s' % (sys.argv[0], message)))
|
def log(message):
' Print logging information\n Args:\n message (str): message to be printed\n '
print(('[%s]: %s' % (sys.argv[0], message)))<|docstring|>Print logging information
Args:
message (str): message to be printed<|endoftext|>
|
af81e884a09ededdeff912ed4b2e19ae42a62b51ee27ef48535d69e60ed74b84
|
def copy_files(source_dir, target_dir):
' Copy any files in the source_dir to the target_dir.\n The copy is not recursive.\n The directories must already exist.\n Args:\n source_dir (str): source directory path\n target_dir (str): target directory path\n Returns:\n Nothing\n '
global testing
assert os.path.isdir(source_dir)
assert os.path.isdir(target_dir)
for source_filename in os.listdir(source_dir):
source_pathname = os.path.join(source_dir, source_filename)
if os.path.isfile(source_pathname):
target_pathname = os.path.join(target_dir, source_filename)
log(('Copy: %s => %s' % (source_pathname, target_pathname)))
if (not testing):
shutil.copy2(source_pathname, target_pathname)
|
Copy any files in the source_dir to the target_dir.
The copy is not recursive.
The directories must already exist.
Args:
source_dir (str): source directory path
target_dir (str): target directory path
Returns:
Nothing
|
src/tests/Common/scripts/run-pmi-diffs.py
|
copy_files
|
DarkBullNull/runtime
| 9,402
|
python
|
def copy_files(source_dir, target_dir):
' Copy any files in the source_dir to the target_dir.\n The copy is not recursive.\n The directories must already exist.\n Args:\n source_dir (str): source directory path\n target_dir (str): target directory path\n Returns:\n Nothing\n '
global testing
assert os.path.isdir(source_dir)
assert os.path.isdir(target_dir)
for source_filename in os.listdir(source_dir):
source_pathname = os.path.join(source_dir, source_filename)
if os.path.isfile(source_pathname):
target_pathname = os.path.join(target_dir, source_filename)
log(('Copy: %s => %s' % (source_pathname, target_pathname)))
if (not testing):
shutil.copy2(source_pathname, target_pathname)
|
def copy_files(source_dir, target_dir):
' Copy any files in the source_dir to the target_dir.\n The copy is not recursive.\n The directories must already exist.\n Args:\n source_dir (str): source directory path\n target_dir (str): target directory path\n Returns:\n Nothing\n '
global testing
assert os.path.isdir(source_dir)
assert os.path.isdir(target_dir)
for source_filename in os.listdir(source_dir):
source_pathname = os.path.join(source_dir, source_filename)
if os.path.isfile(source_pathname):
target_pathname = os.path.join(target_dir, source_filename)
log(('Copy: %s => %s' % (source_pathname, target_pathname)))
if (not testing):
shutil.copy2(source_pathname, target_pathname)<|docstring|>Copy any files in the source_dir to the target_dir.
The copy is not recursive.
The directories must already exist.
Args:
source_dir (str): source directory path
target_dir (str): target directory path
Returns:
Nothing<|endoftext|>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.