repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
tulir/mautrix-python
mautrix_appservice/intent_api.py
HTTPAPI.bot_intent
def bot_intent(self) -> "IntentAPI": """ Get the intent API for the appservice bot. Returns: The IntentAPI for the appservice bot. """ if not self._bot_intent: self._bot_intent = IntentAPI(self.bot_mxid, self, state_store=self.state_store, log=self.intent_log) return self._bot_intent
python
def bot_intent(self) -> "IntentAPI": """ Get the intent API for the appservice bot. Returns: The IntentAPI for the appservice bot. """ if not self._bot_intent: self._bot_intent = IntentAPI(self.bot_mxid, self, state_store=self.state_store, log=self.intent_log) return self._bot_intent
[ "def", "bot_intent", "(", "self", ")", "->", "\"IntentAPI\"", ":", "if", "not", "self", ".", "_bot_intent", ":", "self", ".", "_bot_intent", "=", "IntentAPI", "(", "self", ".", "bot_mxid", ",", "self", ",", "state_store", "=", "self", ".", "state_store", ...
Get the intent API for the appservice bot. Returns: The IntentAPI for the appservice bot.
[ "Get", "the", "intent", "API", "for", "the", "appservice", "bot", "." ]
21bb0870e4103dd03ecc61396ce02adb9301f382
https://github.com/tulir/mautrix-python/blob/21bb0870e4103dd03ecc61396ce02adb9301f382/mautrix_appservice/intent_api.py#L92-L102
train
30,800
tulir/mautrix-python
mautrix_appservice/intent_api.py
HTTPAPI.intent
def intent(self, user: str = None, token: Optional[str] = None) -> "IntentAPI": """ Get the intent API for a specific user. Args: user: The Matrix ID of the user whose intent API to get. Returns: The IntentAPI for the given user. """ if self.is_real_user: raise ValueError("Can't get child intent of real user") if token: return IntentAPI(user, self.real_user(user, token), self.bot_intent(), self.state_store, self.intent_log) return IntentAPI(user, self.user(user), self.bot_intent(), self.state_store, self.intent_log)
python
def intent(self, user: str = None, token: Optional[str] = None) -> "IntentAPI": """ Get the intent API for a specific user. Args: user: The Matrix ID of the user whose intent API to get. Returns: The IntentAPI for the given user. """ if self.is_real_user: raise ValueError("Can't get child intent of real user") if token: return IntentAPI(user, self.real_user(user, token), self.bot_intent(), self.state_store, self.intent_log) return IntentAPI(user, self.user(user), self.bot_intent(), self.state_store, self.intent_log)
[ "def", "intent", "(", "self", ",", "user", ":", "str", "=", "None", ",", "token", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "\"IntentAPI\"", ":", "if", "self", ".", "is_real_user", ":", "raise", "ValueError", "(", "\"Can't get child inten...
Get the intent API for a specific user. Args: user: The Matrix ID of the user whose intent API to get. Returns: The IntentAPI for the given user.
[ "Get", "the", "intent", "API", "for", "a", "specific", "user", "." ]
21bb0870e4103dd03ecc61396ce02adb9301f382
https://github.com/tulir/mautrix-python/blob/21bb0870e4103dd03ecc61396ce02adb9301f382/mautrix_appservice/intent_api.py#L104-L120
train
30,801
tulir/mautrix-python
mautrix_appservice/intent_api.py
HTTPAPI.request
def request(self, method: str, path: str, content: Optional[Union[dict, bytes, str]] = None, timestamp: Optional[int] = None, external_url: Optional[str] = None, headers: Optional[Dict[str, str]] = None, query_params: Optional[Dict[str, Any]] = None, api_path: str = "/_matrix/client/r0") -> Awaitable[dict]: """ Make a raw HTTP request. Args: method: The HTTP method to use. path: The API endpoint to call. Does not include the base path (e.g. /_matrix/client/r0). content: The content to post as a dict (json) or bytes/str (raw). timestamp: The timestamp query param used for timestamp massaging. external_url: The external_url field to send in the content (only applicable if content is dict). headers: The dict of HTTP headers to send. query_params: The dict of query parameters to send. api_path: The base API path. Returns: The response as a dict. """ content = content or {} headers = headers or {} query_params = query_params or {} query_params["access_token"] = self.token if timestamp is not None: if isinstance(timestamp, datetime): timestamp = int(timestamp.replace(tzinfo=timezone.utc).timestamp() * 1000) query_params["ts"] = timestamp if isinstance(content, dict) and external_url is not None: content["external_url"] = external_url method = method.upper() if method not in ["GET", "PUT", "DELETE", "POST"]: raise MatrixError("Unsupported HTTP method: %s" % method) if "Content-Type" not in headers: headers["Content-Type"] = "application/json" if headers.get("Content-Type", None) == "application/json": content = json.dumps(content) if self.identity and not self.is_real_user: query_params["user_id"] = self.identity self._log_request(method, path, content, query_params) endpoint = self.base_url + api_path + path return self._send(method, endpoint, content, query_params, headers or {})
python
def request(self, method: str, path: str, content: Optional[Union[dict, bytes, str]] = None, timestamp: Optional[int] = None, external_url: Optional[str] = None, headers: Optional[Dict[str, str]] = None, query_params: Optional[Dict[str, Any]] = None, api_path: str = "/_matrix/client/r0") -> Awaitable[dict]: """ Make a raw HTTP request. Args: method: The HTTP method to use. path: The API endpoint to call. Does not include the base path (e.g. /_matrix/client/r0). content: The content to post as a dict (json) or bytes/str (raw). timestamp: The timestamp query param used for timestamp massaging. external_url: The external_url field to send in the content (only applicable if content is dict). headers: The dict of HTTP headers to send. query_params: The dict of query parameters to send. api_path: The base API path. Returns: The response as a dict. """ content = content or {} headers = headers or {} query_params = query_params or {} query_params["access_token"] = self.token if timestamp is not None: if isinstance(timestamp, datetime): timestamp = int(timestamp.replace(tzinfo=timezone.utc).timestamp() * 1000) query_params["ts"] = timestamp if isinstance(content, dict) and external_url is not None: content["external_url"] = external_url method = method.upper() if method not in ["GET", "PUT", "DELETE", "POST"]: raise MatrixError("Unsupported HTTP method: %s" % method) if "Content-Type" not in headers: headers["Content-Type"] = "application/json" if headers.get("Content-Type", None) == "application/json": content = json.dumps(content) if self.identity and not self.is_real_user: query_params["user_id"] = self.identity self._log_request(method, path, content, query_params) endpoint = self.base_url + api_path + path return self._send(method, endpoint, content, query_params, headers or {})
[ "def", "request", "(", "self", ",", "method", ":", "str", ",", "path", ":", "str", ",", "content", ":", "Optional", "[", "Union", "[", "dict", ",", "bytes", ",", "str", "]", "]", "=", "None", ",", "timestamp", ":", "Optional", "[", "int", "]", "=...
Make a raw HTTP request. Args: method: The HTTP method to use. path: The API endpoint to call. Does not include the base path (e.g. /_matrix/client/r0). content: The content to post as a dict (json) or bytes/str (raw). timestamp: The timestamp query param used for timestamp massaging. external_url: The external_url field to send in the content (only applicable if content is dict). headers: The dict of HTTP headers to send. query_params: The dict of query parameters to send. api_path: The base API path. Returns: The response as a dict.
[ "Make", "a", "raw", "HTTP", "request", "." ]
21bb0870e4103dd03ecc61396ce02adb9301f382
https://github.com/tulir/mautrix-python/blob/21bb0870e4103dd03ecc61396ce02adb9301f382/mautrix_appservice/intent_api.py#L149-L198
train
30,802
secnot/rectpack
rectpack/guillotine.py
Guillotine._add_section
def _add_section(self, section): """Adds a new section to the free section list, but before that and if section merge is enabled, tries to join the rectangle with all existing sections, if successful the resulting section is again merged with the remaining sections until the operation fails. The result is then appended to the list. Arguments: section (Rectangle): New free section. """ section.rid = 0 plen = 0 while self._merge and self._sections and plen != len(self._sections): plen = len(self._sections) self._sections = [s for s in self._sections if not section.join(s)] self._sections.append(section)
python
def _add_section(self, section): """Adds a new section to the free section list, but before that and if section merge is enabled, tries to join the rectangle with all existing sections, if successful the resulting section is again merged with the remaining sections until the operation fails. The result is then appended to the list. Arguments: section (Rectangle): New free section. """ section.rid = 0 plen = 0 while self._merge and self._sections and plen != len(self._sections): plen = len(self._sections) self._sections = [s for s in self._sections if not section.join(s)] self._sections.append(section)
[ "def", "_add_section", "(", "self", ",", "section", ")", ":", "section", ".", "rid", "=", "0", "plen", "=", "0", "while", "self", ".", "_merge", "and", "self", ".", "_sections", "and", "plen", "!=", "len", "(", "self", ".", "_sections", ")", ":", "...
Adds a new section to the free section list, but before that and if section merge is enabled, tries to join the rectangle with all existing sections, if successful the resulting section is again merged with the remaining sections until the operation fails. The result is then appended to the list. Arguments: section (Rectangle): New free section.
[ "Adds", "a", "new", "section", "to", "the", "free", "section", "list", "but", "before", "that", "and", "if", "section", "merge", "is", "enabled", "tries", "to", "join", "the", "rectangle", "with", "all", "existing", "sections", "if", "successful", "the", "...
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/guillotine.py#L24-L40
train
30,803
secnot/rectpack
rectpack/enclose.py
Enclose._refine_candidate
def _refine_candidate(self, width, height): """ Use bottom-left packing algorithm to find a lower height for the container. Arguments: width height Returns: tuple (width, height, PackingAlgorithm): """ packer = newPacker(PackingMode.Offline, PackingBin.BFF, pack_algo=self._pack_algo, sort_algo=SORT_LSIDE, rotation=self._rotation) packer.add_bin(width, height) for r in self._rectangles: packer.add_rect(*r) packer.pack() # Check all rectangles were packed if len(packer[0]) != len(self._rectangles): return None # Find highest rectangle new_height = max(packer[0], key=lambda x: x.top).top return(width, new_height, packer)
python
def _refine_candidate(self, width, height): """ Use bottom-left packing algorithm to find a lower height for the container. Arguments: width height Returns: tuple (width, height, PackingAlgorithm): """ packer = newPacker(PackingMode.Offline, PackingBin.BFF, pack_algo=self._pack_algo, sort_algo=SORT_LSIDE, rotation=self._rotation) packer.add_bin(width, height) for r in self._rectangles: packer.add_rect(*r) packer.pack() # Check all rectangles were packed if len(packer[0]) != len(self._rectangles): return None # Find highest rectangle new_height = max(packer[0], key=lambda x: x.top).top return(width, new_height, packer)
[ "def", "_refine_candidate", "(", "self", ",", "width", ",", "height", ")", ":", "packer", "=", "newPacker", "(", "PackingMode", ".", "Offline", ",", "PackingBin", ".", "BFF", ",", "pack_algo", "=", "self", ".", "_pack_algo", ",", "sort_algo", "=", "SORT_LS...
Use bottom-left packing algorithm to find a lower height for the container. Arguments: width height Returns: tuple (width, height, PackingAlgorithm):
[ "Use", "bottom", "-", "left", "packing", "algorithm", "to", "find", "a", "lower", "height", "for", "the", "container", "." ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/enclose.py#L89-L117
train
30,804
secnot/rectpack
rectpack/maxrects.py
MaxRects._generate_splits
def _generate_splits(self, m, r): """ When a rectangle is placed inside a maximal rectangle, it stops being one and up to 4 new maximal rectangles may appear depending on the placement. _generate_splits calculates them. Arguments: m (Rectangle): max_rect rectangle r (Rectangle): rectangle placed Returns: list : list containing new maximal rectangles or an empty list """ new_rects = [] if r.left > m.left: new_rects.append(Rectangle(m.left, m.bottom, r.left-m.left, m.height)) if r.right < m.right: new_rects.append(Rectangle(r.right, m.bottom, m.right-r.right, m.height)) if r.top < m.top: new_rects.append(Rectangle(m.left, r.top, m.width, m.top-r.top)) if r.bottom > m.bottom: new_rects.append(Rectangle(m.left, m.bottom, m.width, r.bottom-m.bottom)) return new_rects
python
def _generate_splits(self, m, r): """ When a rectangle is placed inside a maximal rectangle, it stops being one and up to 4 new maximal rectangles may appear depending on the placement. _generate_splits calculates them. Arguments: m (Rectangle): max_rect rectangle r (Rectangle): rectangle placed Returns: list : list containing new maximal rectangles or an empty list """ new_rects = [] if r.left > m.left: new_rects.append(Rectangle(m.left, m.bottom, r.left-m.left, m.height)) if r.right < m.right: new_rects.append(Rectangle(r.right, m.bottom, m.right-r.right, m.height)) if r.top < m.top: new_rects.append(Rectangle(m.left, r.top, m.width, m.top-r.top)) if r.bottom > m.bottom: new_rects.append(Rectangle(m.left, m.bottom, m.width, r.bottom-m.bottom)) return new_rects
[ "def", "_generate_splits", "(", "self", ",", "m", ",", "r", ")", ":", "new_rects", "=", "[", "]", "if", "r", ".", "left", ">", "m", ".", "left", ":", "new_rects", ".", "append", "(", "Rectangle", "(", "m", ".", "left", ",", "m", ".", "bottom", ...
When a rectangle is placed inside a maximal rectangle, it stops being one and up to 4 new maximal rectangles may appear depending on the placement. _generate_splits calculates them. Arguments: m (Rectangle): max_rect rectangle r (Rectangle): rectangle placed Returns: list : list containing new maximal rectangles or an empty list
[ "When", "a", "rectangle", "is", "placed", "inside", "a", "maximal", "rectangle", "it", "stops", "being", "one", "and", "up", "to", "4", "new", "maximal", "rectangles", "may", "appear", "depending", "on", "the", "placement", ".", "_generate_splits", "calculates...
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/maxrects.py#L70-L94
train
30,805
secnot/rectpack
rectpack/maxrects.py
MaxRects._remove_duplicates
def _remove_duplicates(self): """ Remove every maximal rectangle contained by another one. """ contained = set() for m1, m2 in itertools.combinations(self._max_rects, 2): if m1.contains(m2): contained.add(m2) elif m2.contains(m1): contained.add(m1) # Remove from max_rects self._max_rects = [m for m in self._max_rects if m not in contained]
python
def _remove_duplicates(self): """ Remove every maximal rectangle contained by another one. """ contained = set() for m1, m2 in itertools.combinations(self._max_rects, 2): if m1.contains(m2): contained.add(m2) elif m2.contains(m1): contained.add(m1) # Remove from max_rects self._max_rects = [m for m in self._max_rects if m not in contained]
[ "def", "_remove_duplicates", "(", "self", ")", ":", "contained", "=", "set", "(", ")", "for", "m1", ",", "m2", "in", "itertools", ".", "combinations", "(", "self", ".", "_max_rects", ",", "2", ")", ":", "if", "m1", ".", "contains", "(", "m2", ")", ...
Remove every maximal rectangle contained by another one.
[ "Remove", "every", "maximal", "rectangle", "contained", "by", "another", "one", "." ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/maxrects.py#L118-L130
train
30,806
secnot/rectpack
rectpack/maxrects.py
MaxRects.fitness
def fitness(self, width, height): """ Metric used to rate how much space is wasted if a rectangle is placed. Returns a value greater or equal to zero, the smaller the value the more 'fit' is the rectangle. If the rectangle can't be placed, returns None. Arguments: width (int, float): Rectangle width height (int, float): Rectangle height Returns: int, float: Rectangle fitness None: Rectangle can't be placed """ assert(width > 0 and height > 0) rect, max_rect = self._select_position(width, height) if rect is None: return None # Return fitness return self._rect_fitness(max_rect, rect.width, rect.height)
python
def fitness(self, width, height): """ Metric used to rate how much space is wasted if a rectangle is placed. Returns a value greater or equal to zero, the smaller the value the more 'fit' is the rectangle. If the rectangle can't be placed, returns None. Arguments: width (int, float): Rectangle width height (int, float): Rectangle height Returns: int, float: Rectangle fitness None: Rectangle can't be placed """ assert(width > 0 and height > 0) rect, max_rect = self._select_position(width, height) if rect is None: return None # Return fitness return self._rect_fitness(max_rect, rect.width, rect.height)
[ "def", "fitness", "(", "self", ",", "width", ",", "height", ")", ":", "assert", "(", "width", ">", "0", "and", "height", ">", "0", ")", "rect", ",", "max_rect", "=", "self", ".", "_select_position", "(", "width", ",", "height", ")", "if", "rect", "...
Metric used to rate how much space is wasted if a rectangle is placed. Returns a value greater or equal to zero, the smaller the value the more 'fit' is the rectangle. If the rectangle can't be placed, returns None. Arguments: width (int, float): Rectangle width height (int, float): Rectangle height Returns: int, float: Rectangle fitness None: Rectangle can't be placed
[ "Metric", "used", "to", "rate", "how", "much", "space", "is", "wasted", "if", "a", "rectangle", "is", "placed", ".", "Returns", "a", "value", "greater", "or", "equal", "to", "zero", "the", "smaller", "the", "value", "the", "more", "fit", "is", "the", "...
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/maxrects.py#L132-L153
train
30,807
secnot/rectpack
rectpack/maxrects.py
MaxRectsBl._select_position
def _select_position(self, w, h): """ Select the position where the y coordinate of the top of the rectangle is lower, if there are several pick the one with the smallest x coordinate """ fitn = ((m.y+h, m.x, w, h, m) for m in self._max_rects if self._rect_fitness(m, w, h) is not None) fitr = ((m.y+w, m.x, h, w, m) for m in self._max_rects if self._rect_fitness(m, h, w) is not None) if not self.rot: fitr = [] fit = itertools.chain(fitn, fitr) try: _, _, w, h, m = min(fit, key=first_item) except ValueError: return None, None return Rectangle(m.x, m.y, w, h), m
python
def _select_position(self, w, h): """ Select the position where the y coordinate of the top of the rectangle is lower, if there are several pick the one with the smallest x coordinate """ fitn = ((m.y+h, m.x, w, h, m) for m in self._max_rects if self._rect_fitness(m, w, h) is not None) fitr = ((m.y+w, m.x, h, w, m) for m in self._max_rects if self._rect_fitness(m, h, w) is not None) if not self.rot: fitr = [] fit = itertools.chain(fitn, fitr) try: _, _, w, h, m = min(fit, key=first_item) except ValueError: return None, None return Rectangle(m.x, m.y, w, h), m
[ "def", "_select_position", "(", "self", ",", "w", ",", "h", ")", ":", "fitn", "=", "(", "(", "m", ".", "y", "+", "h", ",", "m", ".", "x", ",", "w", ",", "h", ",", "m", ")", "for", "m", "in", "self", ".", "_max_rects", "if", "self", ".", "...
Select the position where the y coordinate of the top of the rectangle is lower, if there are several pick the one with the smallest x coordinate
[ "Select", "the", "position", "where", "the", "y", "coordinate", "of", "the", "top", "of", "the", "rectangle", "is", "lower", "if", "there", "are", "several", "pick", "the", "one", "with", "the", "smallest", "x", "coordinate" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/maxrects.py#L196-L217
train
30,808
secnot/rectpack
rectpack/pack_algo.py
PackingAlgorithm._fits_surface
def _fits_surface(self, width, height): """ Test surface is big enough to place a rectangle Arguments: width (int, float): Rectangle width height (int, float): Rectangle height Returns: boolean: True if it could be placed, False otherwise """ assert(width > 0 and height > 0) if self.rot and (width > self.width or height > self.height): width, height = height, width if width > self.width or height > self.height: return False else: return True
python
def _fits_surface(self, width, height): """ Test surface is big enough to place a rectangle Arguments: width (int, float): Rectangle width height (int, float): Rectangle height Returns: boolean: True if it could be placed, False otherwise """ assert(width > 0 and height > 0) if self.rot and (width > self.width or height > self.height): width, height = height, width if width > self.width or height > self.height: return False else: return True
[ "def", "_fits_surface", "(", "self", ",", "width", ",", "height", ")", ":", "assert", "(", "width", ">", "0", "and", "height", ">", "0", ")", "if", "self", ".", "rot", "and", "(", "width", ">", "self", ".", "width", "or", "height", ">", "self", "...
Test surface is big enough to place a rectangle Arguments: width (int, float): Rectangle width height (int, float): Rectangle height Returns: boolean: True if it could be placed, False otherwise
[ "Test", "surface", "is", "big", "enough", "to", "place", "a", "rectangle" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/pack_algo.py#L31-L49
train
30,809
secnot/rectpack
rectpack/pack_algo.py
PackingAlgorithm.validate_packing
def validate_packing(self): """ Check for collisions between rectangles, also check all are placed inside surface. """ surface = Rectangle(0, 0, self.width, self.height) for r in self: if not surface.contains(r): raise Exception("Rectangle placed outside surface") rectangles = [r for r in self] if len(rectangles) <= 1: return for r1 in range(0, len(rectangles)-2): for r2 in range(r1+1, len(rectangles)-1): if rectangles[r1].intersects(rectangles[r2]): raise Exception("Rectangle collision detected")
python
def validate_packing(self): """ Check for collisions between rectangles, also check all are placed inside surface. """ surface = Rectangle(0, 0, self.width, self.height) for r in self: if not surface.contains(r): raise Exception("Rectangle placed outside surface") rectangles = [r for r in self] if len(rectangles) <= 1: return for r1 in range(0, len(rectangles)-2): for r2 in range(r1+1, len(rectangles)-1): if rectangles[r1].intersects(rectangles[r2]): raise Exception("Rectangle collision detected")
[ "def", "validate_packing", "(", "self", ")", ":", "surface", "=", "Rectangle", "(", "0", ",", "0", ",", "self", ".", "width", ",", "self", ".", "height", ")", "for", "r", "in", "self", ":", "if", "not", "surface", ".", "contains", "(", "r", ")", ...
Check for collisions between rectangles, also check all are placed inside surface.
[ "Check", "for", "collisions", "between", "rectangles", "also", "check", "all", "are", "placed", "inside", "surface", "." ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/pack_algo.py#L111-L130
train
30,810
secnot/rectpack
rectpack/waste.py
WasteManager.add_waste
def add_waste(self, x, y, width, height): """Add new waste section""" self._add_section(Rectangle(x, y, width, height))
python
def add_waste(self, x, y, width, height): """Add new waste section""" self._add_section(Rectangle(x, y, width, height))
[ "def", "add_waste", "(", "self", ",", "x", ",", "y", ",", "width", ",", "height", ")", ":", "self", ".", "_add_section", "(", "Rectangle", "(", "x", ",", "y", ",", "width", ",", "height", ")", ")" ]
Add new waste section
[ "Add", "new", "waste", "section" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/waste.py#L11-L13
train
30,811
secnot/rectpack
rectpack/geometry.py
Point.distance
def distance(self, point): """ Calculate distance to another point """ return sqrt((self.x-point.x)**2+(self.y-point.y)**2)
python
def distance(self, point): """ Calculate distance to another point """ return sqrt((self.x-point.x)**2+(self.y-point.y)**2)
[ "def", "distance", "(", "self", ",", "point", ")", ":", "return", "sqrt", "(", "(", "self", ".", "x", "-", "point", ".", "x", ")", "**", "2", "+", "(", "self", ".", "y", "-", "point", ".", "y", ")", "**", "2", ")" ]
Calculate distance to another point
[ "Calculate", "distance", "to", "another", "point" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/geometry.py#L19-L23
train
30,812
secnot/rectpack
rectpack/geometry.py
Rectangle.move
def move(self, x, y): """ Move Rectangle to x,y coordinates Arguments: x (int, float): X coordinate y (int, float): Y coordinate """ self.x = x self.y = y
python
def move(self, x, y): """ Move Rectangle to x,y coordinates Arguments: x (int, float): X coordinate y (int, float): Y coordinate """ self.x = x self.y = y
[ "def", "move", "(", "self", ",", "x", ",", "y", ")", ":", "self", ".", "x", "=", "x", "self", ".", "y", "=", "y" ]
Move Rectangle to x,y coordinates Arguments: x (int, float): X coordinate y (int, float): Y coordinate
[ "Move", "Rectangle", "to", "x", "y", "coordinates" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/geometry.py#L226-L235
train
30,813
secnot/rectpack
rectpack/geometry.py
Rectangle.contains
def contains(self, rect): """ Tests if another rectangle is contained by this one Arguments: rect (Rectangle): The other rectangle Returns: bool: True if it is contained, False otherwise """ return (rect.y >= self.y and \ rect.x >= self.x and \ rect.y+rect.height <= self.y+self.height and \ rect.x+rect.width <= self.x+self.width)
python
def contains(self, rect): """ Tests if another rectangle is contained by this one Arguments: rect (Rectangle): The other rectangle Returns: bool: True if it is contained, False otherwise """ return (rect.y >= self.y and \ rect.x >= self.x and \ rect.y+rect.height <= self.y+self.height and \ rect.x+rect.width <= self.x+self.width)
[ "def", "contains", "(", "self", ",", "rect", ")", ":", "return", "(", "rect", ".", "y", ">=", "self", ".", "y", "and", "rect", ".", "x", ">=", "self", ".", "x", "and", "rect", ".", "y", "+", "rect", ".", "height", "<=", "self", ".", "y", "+",...
Tests if another rectangle is contained by this one Arguments: rect (Rectangle): The other rectangle Returns: bool: True if it is contained, False otherwise
[ "Tests", "if", "another", "rectangle", "is", "contained", "by", "this", "one" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/geometry.py#L237-L250
train
30,814
secnot/rectpack
rectpack/geometry.py
Rectangle.intersects
def intersects(self, rect, edges=False): """ Detect intersections between this rectangle and rect. Args: rect (Rectangle): Rectangle to test for intersections. edges (bool): Accept edge touching rectangles as intersects or not Returns: bool: True if the rectangles intersect, False otherwise """ # Not even touching if (self.bottom > rect.top or \ self.top < rect.bottom or \ self.left > rect.right or \ self.right < rect.left): return False # Discard edge intersects if not edges: if (self.bottom == rect.top or \ self.top == rect.bottom or \ self.left == rect.right or \ self.right == rect.left): return False # Discard corner intersects if (self.left == rect.right and self.bottom == rect.top or \ self.left == rect.right and rect.bottom == self.top or \ rect.left == self.right and self.bottom == rect.top or \ rect.left == self.right and rect.bottom == self.top): return False return True
python
def intersects(self, rect, edges=False): """ Detect intersections between this rectangle and rect. Args: rect (Rectangle): Rectangle to test for intersections. edges (bool): Accept edge touching rectangles as intersects or not Returns: bool: True if the rectangles intersect, False otherwise """ # Not even touching if (self.bottom > rect.top or \ self.top < rect.bottom or \ self.left > rect.right or \ self.right < rect.left): return False # Discard edge intersects if not edges: if (self.bottom == rect.top or \ self.top == rect.bottom or \ self.left == rect.right or \ self.right == rect.left): return False # Discard corner intersects if (self.left == rect.right and self.bottom == rect.top or \ self.left == rect.right and rect.bottom == self.top or \ rect.left == self.right and self.bottom == rect.top or \ rect.left == self.right and rect.bottom == self.top): return False return True
[ "def", "intersects", "(", "self", ",", "rect", ",", "edges", "=", "False", ")", ":", "# Not even touching", "if", "(", "self", ".", "bottom", ">", "rect", ".", "top", "or", "self", ".", "top", "<", "rect", ".", "bottom", "or", "self", ".", "left", ...
Detect intersections between this rectangle and rect. Args: rect (Rectangle): Rectangle to test for intersections. edges (bool): Accept edge touching rectangles as intersects or not Returns: bool: True if the rectangles intersect, False otherwise
[ "Detect", "intersections", "between", "this", "rectangle", "and", "rect", "." ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/geometry.py#L252-L285
train
30,815
secnot/rectpack
rectpack/geometry.py
Rectangle.join
def join(self, other): """ Try to join a rectangle to this one, if the result is also a rectangle and the operation is successful and this rectangle is modified to the union. Arguments: other (Rectangle): Rectangle to join Returns: bool: True when successfully joined, False otherwise """ if self.contains(other): return True if other.contains(self): self.x = other.x self.y = other.y self.width = other.width self.height = other.height return True if not self.intersects(other, edges=True): return False # Other rectangle is Up/Down from this if self.left == other.left and self.width == other.width: y_min = min(self.bottom, other.bottom) y_max = max(self.top, other.top) self.y = y_min self.height = y_max-y_min return True # Other rectangle is Right/Left from this if self.bottom == other.bottom and self.height == other.height: x_min = min(self.left, other.left) x_max = max(self.right, other.right) self.x = x_min self.width = x_max-x_min return True return False
python
def join(self, other): """ Try to join a rectangle to this one, if the result is also a rectangle and the operation is successful and this rectangle is modified to the union. Arguments: other (Rectangle): Rectangle to join Returns: bool: True when successfully joined, False otherwise """ if self.contains(other): return True if other.contains(self): self.x = other.x self.y = other.y self.width = other.width self.height = other.height return True if not self.intersects(other, edges=True): return False # Other rectangle is Up/Down from this if self.left == other.left and self.width == other.width: y_min = min(self.bottom, other.bottom) y_max = max(self.top, other.top) self.y = y_min self.height = y_max-y_min return True # Other rectangle is Right/Left from this if self.bottom == other.bottom and self.height == other.height: x_min = min(self.left, other.left) x_max = max(self.right, other.right) self.x = x_min self.width = x_max-x_min return True return False
[ "def", "join", "(", "self", ",", "other", ")", ":", "if", "self", ".", "contains", "(", "other", ")", ":", "return", "True", "if", "other", ".", "contains", "(", "self", ")", ":", "self", ".", "x", "=", "other", ".", "x", "self", ".", "y", "=",...
Try to join a rectangle to this one, if the result is also a rectangle and the operation is successful and this rectangle is modified to the union. Arguments: other (Rectangle): Rectangle to join Returns: bool: True when successfully joined, False otherwise
[ "Try", "to", "join", "a", "rectangle", "to", "this", "one", "if", "the", "result", "is", "also", "a", "rectangle", "and", "the", "operation", "is", "successful", "and", "this", "rectangle", "is", "modified", "to", "the", "union", "." ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/geometry.py#L313-L353
train
30,816
secnot/rectpack
rectpack/packer.py
newPacker
def newPacker(mode=PackingMode.Offline, bin_algo=PackingBin.BBF, pack_algo=MaxRectsBssf, sort_algo=SORT_AREA, rotation=True): """ Packer factory helper function Arguments: mode (PackingMode): Packing mode Online: Rectangles are packed as soon are they are added Offline: Rectangles aren't packed untils pack() is called bin_algo (PackingBin): Bin selection heuristic pack_algo (PackingAlgorithm): Algorithm used rotation (boolean): Enable or disable rectangle rotation. Returns: Packer: Initialized packer instance. """ packer_class = None # Online Mode if mode == PackingMode.Online: sort_algo=None if bin_algo == PackingBin.BNF: packer_class = PackerOnlineBNF elif bin_algo == PackingBin.BFF: packer_class = PackerOnlineBFF elif bin_algo == PackingBin.BBF: packer_class = PackerOnlineBBF else: raise AttributeError("Unsupported bin selection heuristic") # Offline Mode elif mode == PackingMode.Offline: if bin_algo == PackingBin.BNF: packer_class = PackerBNF elif bin_algo == PackingBin.BFF: packer_class = PackerBFF elif bin_algo == PackingBin.BBF: packer_class = PackerBBF elif bin_algo == PackingBin.Global: packer_class = PackerGlobal sort_algo=None else: raise AttributeError("Unsupported bin selection heuristic") else: raise AttributeError("Unknown packing mode.") if sort_algo: return packer_class(pack_algo=pack_algo, sort_algo=sort_algo, rotation=rotation) else: return packer_class(pack_algo=pack_algo, rotation=rotation)
python
def newPacker(mode=PackingMode.Offline, bin_algo=PackingBin.BBF, pack_algo=MaxRectsBssf, sort_algo=SORT_AREA, rotation=True): """ Packer factory helper function Arguments: mode (PackingMode): Packing mode Online: Rectangles are packed as soon are they are added Offline: Rectangles aren't packed untils pack() is called bin_algo (PackingBin): Bin selection heuristic pack_algo (PackingAlgorithm): Algorithm used rotation (boolean): Enable or disable rectangle rotation. Returns: Packer: Initialized packer instance. """ packer_class = None # Online Mode if mode == PackingMode.Online: sort_algo=None if bin_algo == PackingBin.BNF: packer_class = PackerOnlineBNF elif bin_algo == PackingBin.BFF: packer_class = PackerOnlineBFF elif bin_algo == PackingBin.BBF: packer_class = PackerOnlineBBF else: raise AttributeError("Unsupported bin selection heuristic") # Offline Mode elif mode == PackingMode.Offline: if bin_algo == PackingBin.BNF: packer_class = PackerBNF elif bin_algo == PackingBin.BFF: packer_class = PackerBFF elif bin_algo == PackingBin.BBF: packer_class = PackerBBF elif bin_algo == PackingBin.Global: packer_class = PackerGlobal sort_algo=None else: raise AttributeError("Unsupported bin selection heuristic") else: raise AttributeError("Unknown packing mode.") if sort_algo: return packer_class(pack_algo=pack_algo, sort_algo=sort_algo, rotation=rotation) else: return packer_class(pack_algo=pack_algo, rotation=rotation)
[ "def", "newPacker", "(", "mode", "=", "PackingMode", ".", "Offline", ",", "bin_algo", "=", "PackingBin", ".", "BBF", ",", "pack_algo", "=", "MaxRectsBssf", ",", "sort_algo", "=", "SORT_AREA", ",", "rotation", "=", "True", ")", ":", "packer_class", "=", "No...
Packer factory helper function Arguments: mode (PackingMode): Packing mode Online: Rectangles are packed as soon are they are added Offline: Rectangles aren't packed untils pack() is called bin_algo (PackingBin): Bin selection heuristic pack_algo (PackingAlgorithm): Algorithm used rotation (boolean): Enable or disable rectangle rotation. Returns: Packer: Initialized packer instance.
[ "Packer", "factory", "helper", "function" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/packer.py#L524-L578
train
30,817
secnot/rectpack
rectpack/packer.py
PackerOnline._new_open_bin
def _new_open_bin(self, width=None, height=None, rid=None): """ Extract the next empty bin and append it to open bins Returns: PackingAlgorithm: Initialized empty packing bin. None: No bin big enough for the rectangle was found """ factories_to_delete = set() # new_bin = None for key, binfac in self._empty_bins.items(): # Only return the new bin if the rect fits. # (If width or height is None, caller doesn't know the size.) if not binfac.fits_inside(width, height): continue # Create bin and add to open_bins new_bin = binfac.new_bin() if new_bin is None: continue self._open_bins.append(new_bin) # If the factory was depleted mark for deletion if binfac.is_empty(): factories_to_delete.add(key) break # Delete marked factories for f in factories_to_delete: del self._empty_bins[f] return new_bin
python
def _new_open_bin(self, width=None, height=None, rid=None): """ Extract the next empty bin and append it to open bins Returns: PackingAlgorithm: Initialized empty packing bin. None: No bin big enough for the rectangle was found """ factories_to_delete = set() # new_bin = None for key, binfac in self._empty_bins.items(): # Only return the new bin if the rect fits. # (If width or height is None, caller doesn't know the size.) if not binfac.fits_inside(width, height): continue # Create bin and add to open_bins new_bin = binfac.new_bin() if new_bin is None: continue self._open_bins.append(new_bin) # If the factory was depleted mark for deletion if binfac.is_empty(): factories_to_delete.add(key) break # Delete marked factories for f in factories_to_delete: del self._empty_bins[f] return new_bin
[ "def", "_new_open_bin", "(", "self", ",", "width", "=", "None", ",", "height", "=", "None", ",", "rid", "=", "None", ")", ":", "factories_to_delete", "=", "set", "(", ")", "#", "new_bin", "=", "None", "for", "key", ",", "binfac", "in", "self", ".", ...
Extract the next empty bin and append it to open bins Returns: PackingAlgorithm: Initialized empty packing bin. None: No bin big enough for the rectangle was found
[ "Extract", "the", "next", "empty", "bin", "and", "append", "it", "to", "open", "bins" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/packer.py#L227-L261
train
30,818
secnot/rectpack
rectpack/packer.py
PackerGlobal._find_best_fit
def _find_best_fit(self, pbin): """ Return best fitness rectangle from rectangles packing _sorted_rect list Arguments: pbin (PackingAlgorithm): Packing bin Returns: key of the rectangle with best fitness """ fit = ((pbin.fitness(r[0], r[1]), k) for k, r in self._sorted_rect.items()) fit = (f for f in fit if f[0] is not None) try: _, rect = min(fit, key=self.first_item) return rect except ValueError: return None
python
def _find_best_fit(self, pbin): """ Return best fitness rectangle from rectangles packing _sorted_rect list Arguments: pbin (PackingAlgorithm): Packing bin Returns: key of the rectangle with best fitness """ fit = ((pbin.fitness(r[0], r[1]), k) for k, r in self._sorted_rect.items()) fit = (f for f in fit if f[0] is not None) try: _, rect = min(fit, key=self.first_item) return rect except ValueError: return None
[ "def", "_find_best_fit", "(", "self", ",", "pbin", ")", ":", "fit", "=", "(", "(", "pbin", ".", "fitness", "(", "r", "[", "0", "]", ",", "r", "[", "1", "]", ")", ",", "k", ")", "for", "k", ",", "r", "in", "self", ".", "_sorted_rect", ".", "...
Return best fitness rectangle from rectangles packing _sorted_rect list Arguments: pbin (PackingAlgorithm): Packing bin Returns: key of the rectangle with best fitness
[ "Return", "best", "fitness", "rectangle", "from", "rectangles", "packing", "_sorted_rect", "list" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/packer.py#L404-L420
train
30,819
secnot/rectpack
rectpack/packer.py
PackerGlobal._new_open_bin
def _new_open_bin(self, remaining_rect): """ Extract the next bin where at least one of the rectangles in rem Arguments: remaining_rect (dict): rectangles not placed yet Returns: PackingAlgorithm: Initialized empty packing bin. None: No bin big enough for the rectangle was found """ factories_to_delete = set() # new_bin = None for key, binfac in self._empty_bins.items(): # Only return the new bin if at least one of the remaining # rectangles fit inside. a_rectangle_fits = False for _, rect in remaining_rect.items(): if binfac.fits_inside(rect[0], rect[1]): a_rectangle_fits = True break if not a_rectangle_fits: factories_to_delete.add(key) continue # Create bin and add to open_bins new_bin = binfac.new_bin() if new_bin is None: continue self._open_bins.append(new_bin) # If the factory was depleted mark for deletion if binfac.is_empty(): factories_to_delete.add(key) break # Delete marked factories for f in factories_to_delete: del self._empty_bins[f] return new_bin
python
def _new_open_bin(self, remaining_rect): """ Extract the next bin where at least one of the rectangles in rem Arguments: remaining_rect (dict): rectangles not placed yet Returns: PackingAlgorithm: Initialized empty packing bin. None: No bin big enough for the rectangle was found """ factories_to_delete = set() # new_bin = None for key, binfac in self._empty_bins.items(): # Only return the new bin if at least one of the remaining # rectangles fit inside. a_rectangle_fits = False for _, rect in remaining_rect.items(): if binfac.fits_inside(rect[0], rect[1]): a_rectangle_fits = True break if not a_rectangle_fits: factories_to_delete.add(key) continue # Create bin and add to open_bins new_bin = binfac.new_bin() if new_bin is None: continue self._open_bins.append(new_bin) # If the factory was depleted mark for deletion if binfac.is_empty(): factories_to_delete.add(key) break # Delete marked factories for f in factories_to_delete: del self._empty_bins[f] return new_bin
[ "def", "_new_open_bin", "(", "self", ",", "remaining_rect", ")", ":", "factories_to_delete", "=", "set", "(", ")", "#", "new_bin", "=", "None", "for", "key", ",", "binfac", "in", "self", ".", "_empty_bins", ".", "items", "(", ")", ":", "# Only return the n...
Extract the next bin where at least one of the rectangles in rem Arguments: remaining_rect (dict): rectangles not placed yet Returns: PackingAlgorithm: Initialized empty packing bin. None: No bin big enough for the rectangle was found
[ "Extract", "the", "next", "bin", "where", "at", "least", "one", "of", "the", "rectangles", "in", "rem" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/packer.py#L423-L468
train
30,820
secnot/rectpack
rectpack/skyline.py
Skyline._placement_points_generator
def _placement_points_generator(self, skyline, width): """Returns a generator for the x coordinates of all the placement points on the skyline for a given rectangle. WARNING: In some cases could be duplicated points, but it is faster to compute them twice than to remove them. Arguments: skyline (list): Skyline HSegment list width (int, float): Rectangle width Returns: generator """ skyline_r = skyline[-1].right skyline_l = skyline[0].left # Placements using skyline segment left point ppointsl = (s.left for s in skyline if s.left+width <= skyline_r) # Placements using skyline segment right point ppointsr = (s.right-width for s in skyline if s.right-width >= skyline_l) # Merge positions return heapq.merge(ppointsl, ppointsr)
python
def _placement_points_generator(self, skyline, width): """Returns a generator for the x coordinates of all the placement points on the skyline for a given rectangle. WARNING: In some cases could be duplicated points, but it is faster to compute them twice than to remove them. Arguments: skyline (list): Skyline HSegment list width (int, float): Rectangle width Returns: generator """ skyline_r = skyline[-1].right skyline_l = skyline[0].left # Placements using skyline segment left point ppointsl = (s.left for s in skyline if s.left+width <= skyline_r) # Placements using skyline segment right point ppointsr = (s.right-width for s in skyline if s.right-width >= skyline_l) # Merge positions return heapq.merge(ppointsl, ppointsr)
[ "def", "_placement_points_generator", "(", "self", ",", "skyline", ",", "width", ")", ":", "skyline_r", "=", "skyline", "[", "-", "1", "]", ".", "right", "skyline_l", "=", "skyline", "[", "0", "]", ".", "left", "# Placements using skyline segment left point", ...
Returns a generator for the x coordinates of all the placement points on the skyline for a given rectangle. WARNING: In some cases could be duplicated points, but it is faster to compute them twice than to remove them. Arguments: skyline (list): Skyline HSegment list width (int, float): Rectangle width Returns: generator
[ "Returns", "a", "generator", "for", "the", "x", "coordinates", "of", "all", "the", "placement", "points", "on", "the", "skyline", "for", "a", "given", "rectangle", "." ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/skyline.py#L37-L61
train
30,821
secnot/rectpack
rectpack/skyline.py
Skyline._generate_placements
def _generate_placements(self, width, height): """ Generate a list with Arguments: skyline (list): SkylineHSegment list width (number): Returns: tuple (Rectangle, fitness): Rectangle: Rectangle in valid position left_skyline: Index for the skyline under the rectangle left edge. right_skyline: Index for the skyline under the rectangle right edte. """ skyline = self._skyline points = collections.deque() left_index = right_index = 0 # Left and right side skyline index support_height = skyline[0].top support_index = 0 placements = self._placement_points_generator(skyline, width) for p in placements: # If Rectangle's right side changed segment, find new support if p+width > skyline[right_index].right: for right_index in range(right_index+1, len(skyline)): if skyline[right_index].top >= support_height: support_index = right_index support_height = skyline[right_index].top if p+width <= skyline[right_index].right: break # If left side changed segment. if p >= skyline[left_index].right: left_index +=1 # Find new support if the previous one was shifted out. if support_index < left_index: support_index = left_index support_height = skyline[left_index].top for i in range(left_index, right_index+1): if skyline[i].top >= support_height: support_index = i support_height = skyline[i].top # Add point if there is enought room at the top if support_height+height <= self.height: points.append((Rectangle(p, support_height, width, height),\ left_index, right_index)) return points
python
def _generate_placements(self, width, height): """ Generate a list with Arguments: skyline (list): SkylineHSegment list width (number): Returns: tuple (Rectangle, fitness): Rectangle: Rectangle in valid position left_skyline: Index for the skyline under the rectangle left edge. right_skyline: Index for the skyline under the rectangle right edte. """ skyline = self._skyline points = collections.deque() left_index = right_index = 0 # Left and right side skyline index support_height = skyline[0].top support_index = 0 placements = self._placement_points_generator(skyline, width) for p in placements: # If Rectangle's right side changed segment, find new support if p+width > skyline[right_index].right: for right_index in range(right_index+1, len(skyline)): if skyline[right_index].top >= support_height: support_index = right_index support_height = skyline[right_index].top if p+width <= skyline[right_index].right: break # If left side changed segment. if p >= skyline[left_index].right: left_index +=1 # Find new support if the previous one was shifted out. if support_index < left_index: support_index = left_index support_height = skyline[left_index].top for i in range(left_index, right_index+1): if skyline[i].top >= support_height: support_index = i support_height = skyline[i].top # Add point if there is enought room at the top if support_height+height <= self.height: points.append((Rectangle(p, support_height, width, height),\ left_index, right_index)) return points
[ "def", "_generate_placements", "(", "self", ",", "width", ",", "height", ")", ":", "skyline", "=", "self", ".", "_skyline", "points", "=", "collections", ".", "deque", "(", ")", "left_index", "=", "right_index", "=", "0", "# Left and right side skyline index", ...
Generate a list with Arguments: skyline (list): SkylineHSegment list width (number): Returns: tuple (Rectangle, fitness): Rectangle: Rectangle in valid position left_skyline: Index for the skyline under the rectangle left edge. right_skyline: Index for the skyline under the rectangle right edte.
[ "Generate", "a", "list", "with" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/skyline.py#L63-L115
train
30,822
secnot/rectpack
rectpack/skyline.py
Skyline._select_position
def _select_position(self, width, height): """ Search for the placement with the bes fitness for the rectangle. Returns: tuple (Rectangle, fitness) - Rectangle placed in the fittest position None - Rectangle couldn't be placed """ positions = self._generate_placements(width, height) if self.rot and width != height: positions += self._generate_placements(height, width) if not positions: return None, None return min(((p[0], self._rect_fitness(*p))for p in positions), key=operator.itemgetter(1))
python
def _select_position(self, width, height): """ Search for the placement with the bes fitness for the rectangle. Returns: tuple (Rectangle, fitness) - Rectangle placed in the fittest position None - Rectangle couldn't be placed """ positions = self._generate_placements(width, height) if self.rot and width != height: positions += self._generate_placements(height, width) if not positions: return None, None return min(((p[0], self._rect_fitness(*p))for p in positions), key=operator.itemgetter(1))
[ "def", "_select_position", "(", "self", ",", "width", ",", "height", ")", ":", "positions", "=", "self", ".", "_generate_placements", "(", "width", ",", "height", ")", "if", "self", ".", "rot", "and", "width", "!=", "height", ":", "positions", "+=", "sel...
Search for the placement with the bes fitness for the rectangle. Returns: tuple (Rectangle, fitness) - Rectangle placed in the fittest position None - Rectangle couldn't be placed
[ "Search", "for", "the", "placement", "with", "the", "bes", "fitness", "for", "the", "rectangle", "." ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/skyline.py#L176-L190
train
30,823
secnot/rectpack
rectpack/skyline.py
Skyline.fitness
def fitness(self, width, height): """Search for the best fitness """ assert(width > 0 and height >0) if width > max(self.width, self.height) or\ height > max(self.height, self.width): return None # If there is room in wasted space, FREE PACKING!! if self._waste_management: if self._waste.fitness(width, height) is not None: return 0 # Get best fitness segment, for normal rectangle, and for # rotated rectangle if rotation is enabled. rect, fitness = self._select_position(width, height) return fitness
python
def fitness(self, width, height): """Search for the best fitness """ assert(width > 0 and height >0) if width > max(self.width, self.height) or\ height > max(self.height, self.width): return None # If there is room in wasted space, FREE PACKING!! if self._waste_management: if self._waste.fitness(width, height) is not None: return 0 # Get best fitness segment, for normal rectangle, and for # rotated rectangle if rotation is enabled. rect, fitness = self._select_position(width, height) return fitness
[ "def", "fitness", "(", "self", ",", "width", ",", "height", ")", ":", "assert", "(", "width", ">", "0", "and", "height", ">", "0", ")", "if", "width", ">", "max", "(", "self", ".", "width", ",", "self", ".", "height", ")", "or", "height", ">", ...
Search for the best fitness
[ "Search", "for", "the", "best", "fitness" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/skyline.py#L192-L208
train
30,824
secnot/rectpack
rectpack/skyline.py
Skyline.add_rect
def add_rect(self, width, height, rid=None): """ Add new rectangle """ assert(width > 0 and height > 0) if width > max(self.width, self.height) or\ height > max(self.height, self.width): return None rect = None # If Waste managment is enabled, first try to place the rectangle there if self._waste_management: rect = self._waste.add_rect(width, height, rid) # Get best possible rectangle position if not rect: rect, _ = self._select_position(width, height) if rect: self._add_skyline(rect) if rect is None: return None # Store rectangle, and recalculate skyline rect.rid = rid self.rectangles.append(rect) return rect
python
def add_rect(self, width, height, rid=None): """ Add new rectangle """ assert(width > 0 and height > 0) if width > max(self.width, self.height) or\ height > max(self.height, self.width): return None rect = None # If Waste managment is enabled, first try to place the rectangle there if self._waste_management: rect = self._waste.add_rect(width, height, rid) # Get best possible rectangle position if not rect: rect, _ = self._select_position(width, height) if rect: self._add_skyline(rect) if rect is None: return None # Store rectangle, and recalculate skyline rect.rid = rid self.rectangles.append(rect) return rect
[ "def", "add_rect", "(", "self", ",", "width", ",", "height", ",", "rid", "=", "None", ")", ":", "assert", "(", "width", ">", "0", "and", "height", ">", "0", ")", "if", "width", ">", "max", "(", "self", ".", "width", ",", "self", ".", "height", ...
Add new rectangle
[ "Add", "new", "rectangle" ]
21d46be48fd453500ea49de699bc9eabc427bdf7
https://github.com/secnot/rectpack/blob/21d46be48fd453500ea49de699bc9eabc427bdf7/rectpack/skyline.py#L210-L236
train
30,825
spotify/docker_interface
docker_interface/util.py
abspath
def abspath(path, ref=None): """ Create an absolute path. Parameters ---------- path : str absolute or relative path with respect to `ref` ref : str or None reference path if `path` is relative Returns ------- path : str absolute path Raises ------ ValueError if an absolute path cannot be constructed """ if ref: path = os.path.join(ref, path) if not os.path.isabs(path): raise ValueError("expected an absolute path but got '%s'" % path) return path
python
def abspath(path, ref=None): """ Create an absolute path. Parameters ---------- path : str absolute or relative path with respect to `ref` ref : str or None reference path if `path` is relative Returns ------- path : str absolute path Raises ------ ValueError if an absolute path cannot be constructed """ if ref: path = os.path.join(ref, path) if not os.path.isabs(path): raise ValueError("expected an absolute path but got '%s'" % path) return path
[ "def", "abspath", "(", "path", ",", "ref", "=", "None", ")", ":", "if", "ref", ":", "path", "=", "os", ".", "path", ".", "join", "(", "ref", ",", "path", ")", "if", "not", "os", ".", "path", ".", "isabs", "(", "path", ")", ":", "raise", "Valu...
Create an absolute path. Parameters ---------- path : str absolute or relative path with respect to `ref` ref : str or None reference path if `path` is relative Returns ------- path : str absolute path Raises ------ ValueError if an absolute path cannot be constructed
[ "Create", "an", "absolute", "path", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/util.py#L29-L54
train
30,826
spotify/docker_interface
docker_interface/util.py
split_path
def split_path(path, ref=None): """ Split a path into its components. Parameters ---------- path : str absolute or relative path with respect to `ref` ref : str or None reference path if `path` is relative Returns ------- list : str components of the path """ path = abspath(path, ref) return path.strip(os.path.sep).split(os.path.sep)
python
def split_path(path, ref=None): """ Split a path into its components. Parameters ---------- path : str absolute or relative path with respect to `ref` ref : str or None reference path if `path` is relative Returns ------- list : str components of the path """ path = abspath(path, ref) return path.strip(os.path.sep).split(os.path.sep)
[ "def", "split_path", "(", "path", ",", "ref", "=", "None", ")", ":", "path", "=", "abspath", "(", "path", ",", "ref", ")", "return", "path", ".", "strip", "(", "os", ".", "path", ".", "sep", ")", ".", "split", "(", "os", ".", "path", ".", "sep"...
Split a path into its components. Parameters ---------- path : str absolute or relative path with respect to `ref` ref : str or None reference path if `path` is relative Returns ------- list : str components of the path
[ "Split", "a", "path", "into", "its", "components", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/util.py#L57-L74
train
30,827
spotify/docker_interface
docker_interface/util.py
get_value
def get_value(instance, path, ref=None): """ Get the value from `instance` at the given `path`. Parameters ---------- instance : dict or list instance from which to retrieve a value path : str path to retrieve a value from ref : str or None reference path if `path` is relative Returns ------- value : value at `path` in `instance` Raises ------ KeyError if `path` is not valid TypeError if a value along the `path` is not a list or dictionary """ for part in split_path(path, ref): if isinstance(instance, list): part = int(part) elif not isinstance(instance, dict): raise TypeError("expected `list` or `dict` but got `%s`" % instance) try: instance = instance[part] except KeyError: raise KeyError(abspath(path, ref)) return instance
python
def get_value(instance, path, ref=None): """ Get the value from `instance` at the given `path`. Parameters ---------- instance : dict or list instance from which to retrieve a value path : str path to retrieve a value from ref : str or None reference path if `path` is relative Returns ------- value : value at `path` in `instance` Raises ------ KeyError if `path` is not valid TypeError if a value along the `path` is not a list or dictionary """ for part in split_path(path, ref): if isinstance(instance, list): part = int(part) elif not isinstance(instance, dict): raise TypeError("expected `list` or `dict` but got `%s`" % instance) try: instance = instance[part] except KeyError: raise KeyError(abspath(path, ref)) return instance
[ "def", "get_value", "(", "instance", ",", "path", ",", "ref", "=", "None", ")", ":", "for", "part", "in", "split_path", "(", "path", ",", "ref", ")", ":", "if", "isinstance", "(", "instance", ",", "list", ")", ":", "part", "=", "int", "(", "part", ...
Get the value from `instance` at the given `path`. Parameters ---------- instance : dict or list instance from which to retrieve a value path : str path to retrieve a value from ref : str or None reference path if `path` is relative Returns ------- value : value at `path` in `instance` Raises ------ KeyError if `path` is not valid TypeError if a value along the `path` is not a list or dictionary
[ "Get", "the", "value", "from", "instance", "at", "the", "given", "path", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/util.py#L77-L111
train
30,828
spotify/docker_interface
docker_interface/util.py
pop_value
def pop_value(instance, path, ref=None): """ Pop the value from `instance` at the given `path`. Parameters ---------- instance : dict or list instance from which to retrieve a value path : str path to retrieve a value from ref : str or None reference path if `path` is relative Returns ------- value : value at `path` in `instance` """ head, tail = os.path.split(abspath(path, ref)) instance = get_value(instance, head) if isinstance(instance, list): tail = int(tail) return instance.pop(tail)
python
def pop_value(instance, path, ref=None): """ Pop the value from `instance` at the given `path`. Parameters ---------- instance : dict or list instance from which to retrieve a value path : str path to retrieve a value from ref : str or None reference path if `path` is relative Returns ------- value : value at `path` in `instance` """ head, tail = os.path.split(abspath(path, ref)) instance = get_value(instance, head) if isinstance(instance, list): tail = int(tail) return instance.pop(tail)
[ "def", "pop_value", "(", "instance", ",", "path", ",", "ref", "=", "None", ")", ":", "head", ",", "tail", "=", "os", ".", "path", ".", "split", "(", "abspath", "(", "path", ",", "ref", ")", ")", "instance", "=", "get_value", "(", "instance", ",", ...
Pop the value from `instance` at the given `path`. Parameters ---------- instance : dict or list instance from which to retrieve a value path : str path to retrieve a value from ref : str or None reference path if `path` is relative Returns ------- value : value at `path` in `instance`
[ "Pop", "the", "value", "from", "instance", "at", "the", "given", "path", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/util.py#L114-L136
train
30,829
spotify/docker_interface
docker_interface/util.py
merge
def merge(x, y): """ Merge two dictionaries and raise an error for inconsistencies. Parameters ---------- x : dict dictionary x y : dict dictionary y Returns ------- x : dict merged dictionary Raises ------ ValueError if `x` and `y` are inconsistent """ keys_x = set(x) keys_y = set(y) for key in keys_y - keys_x: x[key] = y[key] for key in keys_x & keys_y: value_x = x[key] value_y = y[key] if isinstance(value_x, dict) and isinstance(value_y, dict): x[key] = merge(value_x, value_y) else: if value_x != value_y: raise ValueError return x
python
def merge(x, y): """ Merge two dictionaries and raise an error for inconsistencies. Parameters ---------- x : dict dictionary x y : dict dictionary y Returns ------- x : dict merged dictionary Raises ------ ValueError if `x` and `y` are inconsistent """ keys_x = set(x) keys_y = set(y) for key in keys_y - keys_x: x[key] = y[key] for key in keys_x & keys_y: value_x = x[key] value_y = y[key] if isinstance(value_x, dict) and isinstance(value_y, dict): x[key] = merge(value_x, value_y) else: if value_x != value_y: raise ValueError return x
[ "def", "merge", "(", "x", ",", "y", ")", ":", "keys_x", "=", "set", "(", "x", ")", "keys_y", "=", "set", "(", "y", ")", "for", "key", "in", "keys_y", "-", "keys_x", ":", "x", "[", "key", "]", "=", "y", "[", "key", "]", "for", "key", "in", ...
Merge two dictionaries and raise an error for inconsistencies. Parameters ---------- x : dict dictionary x y : dict dictionary y Returns ------- x : dict merged dictionary Raises ------ ValueError if `x` and `y` are inconsistent
[ "Merge", "two", "dictionaries", "and", "raise", "an", "error", "for", "inconsistencies", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/util.py#L181-L218
train
30,830
spotify/docker_interface
docker_interface/util.py
set_default_from_schema
def set_default_from_schema(instance, schema): """ Populate default values on an `instance` given a `schema`. Parameters ---------- instance : dict instance to populate default values for schema : dict JSON schema with default values Returns ------- instance : dict instance with populated default values """ for name, property_ in schema.get('properties', {}).items(): # Set the defaults at this level of the schema if 'default' in property_: instance.setdefault(name, property_['default']) # Descend one level if the property is an object if 'properties' in property_: set_default_from_schema(instance.setdefault(name, {}), property_) return instance
python
def set_default_from_schema(instance, schema): """ Populate default values on an `instance` given a `schema`. Parameters ---------- instance : dict instance to populate default values for schema : dict JSON schema with default values Returns ------- instance : dict instance with populated default values """ for name, property_ in schema.get('properties', {}).items(): # Set the defaults at this level of the schema if 'default' in property_: instance.setdefault(name, property_['default']) # Descend one level if the property is an object if 'properties' in property_: set_default_from_schema(instance.setdefault(name, {}), property_) return instance
[ "def", "set_default_from_schema", "(", "instance", ",", "schema", ")", ":", "for", "name", ",", "property_", "in", "schema", ".", "get", "(", "'properties'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "# Set the defaults at this level of the schema", "if...
Populate default values on an `instance` given a `schema`. Parameters ---------- instance : dict instance to populate default values for schema : dict JSON schema with default values Returns ------- instance : dict instance with populated default values
[ "Populate", "default", "values", "on", "an", "instance", "given", "a", "schema", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/util.py#L221-L244
train
30,831
spotify/docker_interface
docker_interface/util.py
apply
def apply(instance, func, path=None): """ Apply `func` to all fundamental types of `instance`. Parameters ---------- instance : dict instance to apply functions to func : callable function with two arguments (instance, path) to apply to all fundamental types recursively path : str path in the document (defaults to '/') Returns ------- instance : dict instance after applying `func` to fundamental types """ path = path or os.path.sep if isinstance(instance, list): return [apply(item, func, os.path.join(path, str(i))) for i, item in enumerate(instance)] elif isinstance(instance, dict): return {key: apply(value, func, os.path.join(path, key)) for key, value in instance.items()} return func(instance, path)
python
def apply(instance, func, path=None): """ Apply `func` to all fundamental types of `instance`. Parameters ---------- instance : dict instance to apply functions to func : callable function with two arguments (instance, path) to apply to all fundamental types recursively path : str path in the document (defaults to '/') Returns ------- instance : dict instance after applying `func` to fundamental types """ path = path or os.path.sep if isinstance(instance, list): return [apply(item, func, os.path.join(path, str(i))) for i, item in enumerate(instance)] elif isinstance(instance, dict): return {key: apply(value, func, os.path.join(path, key)) for key, value in instance.items()} return func(instance, path)
[ "def", "apply", "(", "instance", ",", "func", ",", "path", "=", "None", ")", ":", "path", "=", "path", "or", "os", ".", "path", ".", "sep", "if", "isinstance", "(", "instance", ",", "list", ")", ":", "return", "[", "apply", "(", "item", ",", "fun...
Apply `func` to all fundamental types of `instance`. Parameters ---------- instance : dict instance to apply functions to func : callable function with two arguments (instance, path) to apply to all fundamental types recursively path : str path in the document (defaults to '/') Returns ------- instance : dict instance after applying `func` to fundamental types
[ "Apply", "func", "to", "all", "fundamental", "types", "of", "instance", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/util.py#L247-L270
train
30,832
spotify/docker_interface
docker_interface/util.py
get_free_port
def get_free_port(ports=None): """ Get a free port. Parameters ---------- ports : iterable ports to check (obtain a random port by default) Returns ------- port : int a free port """ if ports is None: with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as _socket: _socket.bind(('', 0)) _, port = _socket.getsockname() return port # Get ports from the specified list for port in ports: with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as _socket: try: _socket.bind(('', port)) return port except socket.error as ex: if ex.errno not in (48, 98): raise raise RuntimeError("could not find a free port")
python
def get_free_port(ports=None): """ Get a free port. Parameters ---------- ports : iterable ports to check (obtain a random port by default) Returns ------- port : int a free port """ if ports is None: with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as _socket: _socket.bind(('', 0)) _, port = _socket.getsockname() return port # Get ports from the specified list for port in ports: with contextlib.closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as _socket: try: _socket.bind(('', port)) return port except socket.error as ex: if ex.errno not in (48, 98): raise raise RuntimeError("could not find a free port")
[ "def", "get_free_port", "(", "ports", "=", "None", ")", ":", "if", "ports", "is", "None", ":", "with", "contextlib", ".", "closing", "(", "socket", ".", "socket", "(", "socket", ".", "AF_INET", ",", "socket", ".", "SOCK_STREAM", ")", ")", "as", "_socke...
Get a free port. Parameters ---------- ports : iterable ports to check (obtain a random port by default) Returns ------- port : int a free port
[ "Get", "a", "free", "port", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/util.py#L273-L303
train
30,833
spotify/docker_interface
docker_interface/docker_interface.py
build_parameter_parts
def build_parameter_parts(configuration, *parameters): """ Construct command parts for one or more parameters. Parameters ---------- configuration : dict configuration parameters : list list of parameters to create command line arguments for Yields ------ argument : str command line argument """ for parameter in parameters: values = configuration.pop(parameter, []) if values: if not isinstance(values, list): values = [values] for value in values: yield '--%s=%s' % (parameter, value)
python
def build_parameter_parts(configuration, *parameters): """ Construct command parts for one or more parameters. Parameters ---------- configuration : dict configuration parameters : list list of parameters to create command line arguments for Yields ------ argument : str command line argument """ for parameter in parameters: values = configuration.pop(parameter, []) if values: if not isinstance(values, list): values = [values] for value in values: yield '--%s=%s' % (parameter, value)
[ "def", "build_parameter_parts", "(", "configuration", ",", "*", "parameters", ")", ":", "for", "parameter", "in", "parameters", ":", "values", "=", "configuration", ".", "pop", "(", "parameter", ",", "[", "]", ")", "if", "values", ":", "if", "not", "isinst...
Construct command parts for one or more parameters. Parameters ---------- configuration : dict configuration parameters : list list of parameters to create command line arguments for Yields ------ argument : str command line argument
[ "Construct", "command", "parts", "for", "one", "or", "more", "parameters", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/docker_interface.py#L18-L40
train
30,834
spotify/docker_interface
docker_interface/docker_interface.py
build_dict_parameter_parts
def build_dict_parameter_parts(configuration, *parameters, **defaults): """ Construct command parts for one or more parameters, each of which constitutes an assignment of the form `key=value`. Parameters ---------- configuration : dict configuration parameters : list list of parameters to create command line arguments for defaults : dict default values to use if a parameter is missing Yields ------ argument : str command line argument """ for parameter in parameters: for key, value in configuration.pop(parameter, {}).items(): yield '--%s=%s=%s' % (parameter, key, value)
python
def build_dict_parameter_parts(configuration, *parameters, **defaults): """ Construct command parts for one or more parameters, each of which constitutes an assignment of the form `key=value`. Parameters ---------- configuration : dict configuration parameters : list list of parameters to create command line arguments for defaults : dict default values to use if a parameter is missing Yields ------ argument : str command line argument """ for parameter in parameters: for key, value in configuration.pop(parameter, {}).items(): yield '--%s=%s=%s' % (parameter, key, value)
[ "def", "build_dict_parameter_parts", "(", "configuration", ",", "*", "parameters", ",", "*", "*", "defaults", ")", ":", "for", "parameter", "in", "parameters", ":", "for", "key", ",", "value", "in", "configuration", ".", "pop", "(", "parameter", ",", "{", ...
Construct command parts for one or more parameters, each of which constitutes an assignment of the form `key=value`. Parameters ---------- configuration : dict configuration parameters : list list of parameters to create command line arguments for defaults : dict default values to use if a parameter is missing Yields ------ argument : str command line argument
[ "Construct", "command", "parts", "for", "one", "or", "more", "parameters", "each", "of", "which", "constitutes", "an", "assignment", "of", "the", "form", "key", "=", "value", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/docker_interface.py#L43-L64
train
30,835
spotify/docker_interface
docker_interface/docker_interface.py
build_docker_run_command
def build_docker_run_command(configuration): """ Translate a declarative docker `configuration` to a `docker run` command. Parameters ---------- configuration : dict configuration Returns ------- args : list sequence of command line arguments to run a command in a container """ parts = configuration.pop('docker').split() parts.append('run') run = configuration.pop('run') # Ensure all env-files have proper paths if 'env-file' in run: run['env-file'] = [os.path.join(configuration['workspace'], env_file) for env_file in run['env-file']] parts.extend(build_parameter_parts( run, 'user', 'workdir', 'rm', 'interactive', 'tty', 'env-file', 'cpu-shares', 'name', 'network', 'label', 'memory', 'entrypoint', 'runtime', 'privileged', 'group-add' )) # Add the mounts # The following code requires docker >= 17.06 '''for mount in run.pop('mount', []): if mount['type'] == 'bind': mount['source'] = os.path.join( configuration['workspace'], mount['source']) parts.extend(['--mount', ",".join(["%s=%s" % item for item in mount.items()])])''' # Add the mounts for mount in run.pop('mount', []): if mount['type'] == 'tmpfs': raise RuntimeError('tmpfs-mounts are currently not supported via the mount ' + 'directive in docker_interface. 
Consider using the tmpfs ' + 'directive instead.') if mount['type'] == 'bind': mount['source'] = os.path.abspath( os.path.join(configuration['workspace'], mount['source'])) vol_config = '--volume=%s:%s' % (mount['source'], mount['destination']) if 'readonly' in mount and mount['readonly']: vol_config += ':ro' parts.append(vol_config) # Set or forward environment variables for key, value in run.pop('env', {}).items(): if value is None: parts.append('--env=%s' % key) else: parts.append('--env=%s=%s' % (key, value)) parts.append('--env=DOCKER_INTERFACE=true') # Forward ports for publish in run.pop('publish', []): parts.append('--publish=%s:%s:%s' % tuple([ publish.get(key, '') for key in "ip host container".split()])) # Add temporary file systems for tmpfs in run.pop('tmpfs', []): destination = tmpfs['destination'] options = tmpfs.pop('options', []) for key in ['mode', 'size']: if key in tmpfs: options.append('%s=%s' % (key, tmpfs[key])) if options: destination = "%s:%s" % (destination, ",".join(options)) parts.extend(['--tmpfs', destination]) parts.append(run.pop('image')) parts.extend(run.pop('cmd', [])) return parts
python
def build_docker_run_command(configuration): """ Translate a declarative docker `configuration` to a `docker run` command. Parameters ---------- configuration : dict configuration Returns ------- args : list sequence of command line arguments to run a command in a container """ parts = configuration.pop('docker').split() parts.append('run') run = configuration.pop('run') # Ensure all env-files have proper paths if 'env-file' in run: run['env-file'] = [os.path.join(configuration['workspace'], env_file) for env_file in run['env-file']] parts.extend(build_parameter_parts( run, 'user', 'workdir', 'rm', 'interactive', 'tty', 'env-file', 'cpu-shares', 'name', 'network', 'label', 'memory', 'entrypoint', 'runtime', 'privileged', 'group-add' )) # Add the mounts # The following code requires docker >= 17.06 '''for mount in run.pop('mount', []): if mount['type'] == 'bind': mount['source'] = os.path.join( configuration['workspace'], mount['source']) parts.extend(['--mount', ",".join(["%s=%s" % item for item in mount.items()])])''' # Add the mounts for mount in run.pop('mount', []): if mount['type'] == 'tmpfs': raise RuntimeError('tmpfs-mounts are currently not supported via the mount ' + 'directive in docker_interface. 
Consider using the tmpfs ' + 'directive instead.') if mount['type'] == 'bind': mount['source'] = os.path.abspath( os.path.join(configuration['workspace'], mount['source'])) vol_config = '--volume=%s:%s' % (mount['source'], mount['destination']) if 'readonly' in mount and mount['readonly']: vol_config += ':ro' parts.append(vol_config) # Set or forward environment variables for key, value in run.pop('env', {}).items(): if value is None: parts.append('--env=%s' % key) else: parts.append('--env=%s=%s' % (key, value)) parts.append('--env=DOCKER_INTERFACE=true') # Forward ports for publish in run.pop('publish', []): parts.append('--publish=%s:%s:%s' % tuple([ publish.get(key, '') for key in "ip host container".split()])) # Add temporary file systems for tmpfs in run.pop('tmpfs', []): destination = tmpfs['destination'] options = tmpfs.pop('options', []) for key in ['mode', 'size']: if key in tmpfs: options.append('%s=%s' % (key, tmpfs[key])) if options: destination = "%s:%s" % (destination, ",".join(options)) parts.extend(['--tmpfs', destination]) parts.append(run.pop('image')) parts.extend(run.pop('cmd', [])) return parts
[ "def", "build_docker_run_command", "(", "configuration", ")", ":", "parts", "=", "configuration", ".", "pop", "(", "'docker'", ")", ".", "split", "(", ")", "parts", ".", "append", "(", "'run'", ")", "run", "=", "configuration", ".", "pop", "(", "'run'", ...
Translate a declarative docker `configuration` to a `docker run` command. Parameters ---------- configuration : dict configuration Returns ------- args : list sequence of command line arguments to run a command in a container
[ "Translate", "a", "declarative", "docker", "configuration", "to", "a", "docker", "run", "command", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/docker_interface.py#L67-L145
train
30,836
spotify/docker_interface
docker_interface/docker_interface.py
build_docker_build_command
def build_docker_build_command(configuration): """ Translate a declarative docker `configuration` to a `docker build` command. Parameters ---------- configuration : dict configuration Returns ------- args : list sequence of command line arguments to build an image """ parts = configuration.pop('docker', 'docker').split() parts.append('build') build = configuration.pop('build') build['path'] = os.path.join(configuration['workspace'], build['path']) build['file'] = os.path.join(build['path'], build['file']) parts.extend(build_parameter_parts( build, 'tag', 'file', 'no-cache', 'quiet', 'cpu-shares', 'memory')) parts.extend(build_dict_parameter_parts(build, 'build-arg')) parts.append(build.pop('path')) return parts
python
def build_docker_build_command(configuration): """ Translate a declarative docker `configuration` to a `docker build` command. Parameters ---------- configuration : dict configuration Returns ------- args : list sequence of command line arguments to build an image """ parts = configuration.pop('docker', 'docker').split() parts.append('build') build = configuration.pop('build') build['path'] = os.path.join(configuration['workspace'], build['path']) build['file'] = os.path.join(build['path'], build['file']) parts.extend(build_parameter_parts( build, 'tag', 'file', 'no-cache', 'quiet', 'cpu-shares', 'memory')) parts.extend(build_dict_parameter_parts(build, 'build-arg')) parts.append(build.pop('path')) return parts
[ "def", "build_docker_build_command", "(", "configuration", ")", ":", "parts", "=", "configuration", ".", "pop", "(", "'docker'", ",", "'docker'", ")", ".", "split", "(", ")", "parts", ".", "append", "(", "'build'", ")", "build", "=", "configuration", ".", ...
Translate a declarative docker `configuration` to a `docker build` command. Parameters ---------- configuration : dict configuration Returns ------- args : list sequence of command line arguments to build an image
[ "Translate", "a", "declarative", "docker", "configuration", "to", "a", "docker", "build", "command", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/docker_interface.py#L148-L176
train
30,837
spotify/docker_interface
docker_interface/plugins/base.py
Plugin.add_argument
def add_argument(self, parser, path, name=None, schema=None, **kwargs): """ Add an argument to the `parser` based on a schema definition. Parameters ---------- parser : argparse.ArgumentParser parser to add an argument to path : str path in the configuration document to add an argument for name : str or None name of the command line parameter (defaults to the name in the schema) schema : dict JSON schema definition (defaults to the schema of the plugin) Returns ------- arg : command line argument definition """ schema = schema or self.SCHEMA name = name or ('--%s' % os.path.basename(path)) self.arguments[name.strip('-')] = path # Build a path to the help in the schema path = util.split_path(path) path = os.path.sep.join( it.chain([os.path.sep], *zip(it.repeat("properties"), path))) property_ = util.get_value(schema, path) defaults = { 'choices': property_.get('enum'), 'help': property_.get('description') } if 'type' in property_: defaults['type'] = util.TYPES[property_['type']] defaults.update(kwargs) return parser.add_argument(name, **defaults)
python
def add_argument(self, parser, path, name=None, schema=None, **kwargs): """ Add an argument to the `parser` based on a schema definition. Parameters ---------- parser : argparse.ArgumentParser parser to add an argument to path : str path in the configuration document to add an argument for name : str or None name of the command line parameter (defaults to the name in the schema) schema : dict JSON schema definition (defaults to the schema of the plugin) Returns ------- arg : command line argument definition """ schema = schema or self.SCHEMA name = name or ('--%s' % os.path.basename(path)) self.arguments[name.strip('-')] = path # Build a path to the help in the schema path = util.split_path(path) path = os.path.sep.join( it.chain([os.path.sep], *zip(it.repeat("properties"), path))) property_ = util.get_value(schema, path) defaults = { 'choices': property_.get('enum'), 'help': property_.get('description') } if 'type' in property_: defaults['type'] = util.TYPES[property_['type']] defaults.update(kwargs) return parser.add_argument(name, **defaults)
[ "def", "add_argument", "(", "self", ",", "parser", ",", "path", ",", "name", "=", "None", ",", "schema", "=", "None", ",", "*", "*", "kwargs", ")", ":", "schema", "=", "schema", "or", "self", ".", "SCHEMA", "name", "=", "name", "or", "(", "'--%s'",...
Add an argument to the `parser` based on a schema definition. Parameters ---------- parser : argparse.ArgumentParser parser to add an argument to path : str path in the configuration document to add an argument for name : str or None name of the command line parameter (defaults to the name in the schema) schema : dict JSON schema definition (defaults to the schema of the plugin) Returns ------- arg : command line argument definition
[ "Add", "an", "argument", "to", "the", "parser", "based", "on", "a", "schema", "definition", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/plugins/base.py#L42-L77
train
30,838
spotify/docker_interface
docker_interface/plugins/base.py
Plugin.apply
def apply(self, configuration, schema, args): """ Apply the plugin to the configuration. Inheriting plugins should implement this method to add additional functionality. Parameters ---------- configuration : dict configuration schema : dict JSON schema args : argparse.NameSpace parsed command line arguments Returns ------- configuration : dict updated configuration after applying the plugin """ # Set values from the command line for name, path in self.arguments.items(): value = getattr(args, name.replace('-', '_')) if value is not None: util.set_value(configuration, path, value) return configuration
python
def apply(self, configuration, schema, args): """ Apply the plugin to the configuration. Inheriting plugins should implement this method to add additional functionality. Parameters ---------- configuration : dict configuration schema : dict JSON schema args : argparse.NameSpace parsed command line arguments Returns ------- configuration : dict updated configuration after applying the plugin """ # Set values from the command line for name, path in self.arguments.items(): value = getattr(args, name.replace('-', '_')) if value is not None: util.set_value(configuration, path, value) return configuration
[ "def", "apply", "(", "self", ",", "configuration", ",", "schema", ",", "args", ")", ":", "# Set values from the command line", "for", "name", ",", "path", "in", "self", ".", "arguments", ".", "items", "(", ")", ":", "value", "=", "getattr", "(", "args", ...
Apply the plugin to the configuration. Inheriting plugins should implement this method to add additional functionality. Parameters ---------- configuration : dict configuration schema : dict JSON schema args : argparse.NameSpace parsed command line arguments Returns ------- configuration : dict updated configuration after applying the plugin
[ "Apply", "the", "plugin", "to", "the", "configuration", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/plugins/base.py#L93-L119
train
30,839
spotify/docker_interface
docker_interface/plugins/base.py
Plugin.load_plugins
def load_plugins(): """ Load all availabe plugins. Returns ------- plugin_cls : dict mapping from plugin names to plugin classes """ plugin_cls = {} for entry_point in pkg_resources.iter_entry_points('docker_interface.plugins'): cls = entry_point.load() assert cls.COMMANDS is not None, \ "plugin '%s' does not define its commands" % entry_point.name assert cls.ORDER is not None, \ "plugin '%s' does not define its priority" % entry_point.name plugin_cls[entry_point.name] = cls return plugin_cls
python
def load_plugins(): """ Load all availabe plugins. Returns ------- plugin_cls : dict mapping from plugin names to plugin classes """ plugin_cls = {} for entry_point in pkg_resources.iter_entry_points('docker_interface.plugins'): cls = entry_point.load() assert cls.COMMANDS is not None, \ "plugin '%s' does not define its commands" % entry_point.name assert cls.ORDER is not None, \ "plugin '%s' does not define its priority" % entry_point.name plugin_cls[entry_point.name] = cls return plugin_cls
[ "def", "load_plugins", "(", ")", ":", "plugin_cls", "=", "{", "}", "for", "entry_point", "in", "pkg_resources", ".", "iter_entry_points", "(", "'docker_interface.plugins'", ")", ":", "cls", "=", "entry_point", ".", "load", "(", ")", "assert", "cls", ".", "CO...
Load all availabe plugins. Returns ------- plugin_cls : dict mapping from plugin names to plugin classes
[ "Load", "all", "availabe", "plugins", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/plugins/base.py#L122-L139
train
30,840
spotify/docker_interface
docker_interface/plugins/base.py
SubstitutionPlugin.substitute_variables
def substitute_variables(cls, configuration, value, ref): """ Substitute variables in `value` from `configuration` where any path reference is relative to `ref`. Parameters ---------- configuration : dict configuration (required to resolve intra-document references) value : value to resolve substitutions for ref : str path to `value` in the `configuration` Returns ------- value : value after substitution """ if isinstance(value, str): # Substitute all intra-document references while True: match = cls.REF_PATTERN.search(value) if match is None: break path = os.path.join(os.path.dirname(ref), match.group('path')) try: value = value.replace( match.group(0), str(util.get_value(configuration, path))) except KeyError: raise KeyError(path) # Substitute all variable references while True: match = cls.VAR_PATTERN.search(value) if match is None: break value = value.replace( match.group(0), str(util.get_value(cls.VARIABLES, match.group('path'), '/'))) return value
python
def substitute_variables(cls, configuration, value, ref): """ Substitute variables in `value` from `configuration` where any path reference is relative to `ref`. Parameters ---------- configuration : dict configuration (required to resolve intra-document references) value : value to resolve substitutions for ref : str path to `value` in the `configuration` Returns ------- value : value after substitution """ if isinstance(value, str): # Substitute all intra-document references while True: match = cls.REF_PATTERN.search(value) if match is None: break path = os.path.join(os.path.dirname(ref), match.group('path')) try: value = value.replace( match.group(0), str(util.get_value(configuration, path))) except KeyError: raise KeyError(path) # Substitute all variable references while True: match = cls.VAR_PATTERN.search(value) if match is None: break value = value.replace( match.group(0), str(util.get_value(cls.VARIABLES, match.group('path'), '/'))) return value
[ "def", "substitute_variables", "(", "cls", ",", "configuration", ",", "value", ",", "ref", ")", ":", "if", "isinstance", "(", "value", ",", "str", ")", ":", "# Substitute all intra-document references", "while", "True", ":", "match", "=", "cls", ".", "REF_PATT...
Substitute variables in `value` from `configuration` where any path reference is relative to `ref`. Parameters ---------- configuration : dict configuration (required to resolve intra-document references) value : value to resolve substitutions for ref : str path to `value` in the `configuration` Returns ------- value : value after substitution
[ "Substitute", "variables", "in", "value", "from", "configuration", "where", "any", "path", "reference", "is", "relative", "to", "ref", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/plugins/base.py#L347-L387
train
30,841
spotify/docker_interface
docker_interface/plugins/user.py
UserPlugin.get_user_group
def get_user_group(self, user=None, group=None): """ Get the user and group information. Parameters ---------- user : str User name or user id (default is the `os.getuid()`). group : str Group name or group id (default is the group of `user`). Returns ------- user : pwd.struct_passwd User object. group : grp.struct_group Group object. """ user = user or os.getuid() # Convert the information we have obtained to a user object try: try: user = pwd.getpwuid(int(user)) except ValueError: user = pwd.getpwnam(user) except KeyError as ex: # pragma: no cover self.logger.fatal("could not resolve user: %s", ex) raise # Get the group group = group or user.pw_gid try: try: group = grp.getgrgid(int(group)) except ValueError: group = grp.getgrnam(group) except KeyError as ex: # pragma: no cover self.logger.fatal("could not resolve group:%s", ex) raise return user, group
python
def get_user_group(self, user=None, group=None): """ Get the user and group information. Parameters ---------- user : str User name or user id (default is the `os.getuid()`). group : str Group name or group id (default is the group of `user`). Returns ------- user : pwd.struct_passwd User object. group : grp.struct_group Group object. """ user = user or os.getuid() # Convert the information we have obtained to a user object try: try: user = pwd.getpwuid(int(user)) except ValueError: user = pwd.getpwnam(user) except KeyError as ex: # pragma: no cover self.logger.fatal("could not resolve user: %s", ex) raise # Get the group group = group or user.pw_gid try: try: group = grp.getgrgid(int(group)) except ValueError: group = grp.getgrnam(group) except KeyError as ex: # pragma: no cover self.logger.fatal("could not resolve group:%s", ex) raise return user, group
[ "def", "get_user_group", "(", "self", ",", "user", "=", "None", ",", "group", "=", "None", ")", ":", "user", "=", "user", "or", "os", ".", "getuid", "(", ")", "# Convert the information we have obtained to a user object", "try", ":", "try", ":", "user", "=",...
Get the user and group information. Parameters ---------- user : str User name or user id (default is the `os.getuid()`). group : str Group name or group id (default is the group of `user`). Returns ------- user : pwd.struct_passwd User object. group : grp.struct_group Group object.
[ "Get", "the", "user", "and", "group", "information", "." ]
4df80e1fe072d958020080d32c16551ff7703d51
https://github.com/spotify/docker_interface/blob/4df80e1fe072d958020080d32c16551ff7703d51/docker_interface/plugins/user.py#L60-L100
train
30,842
git-afsantos/bonsai
bonsai/model.py
pretty_str
def pretty_str(something, indent=0): """Return a human-readable string representation of an object. Uses `pretty_str` if the given value is an instance of `CodeEntity` and `repr` otherwise. Args: something: Some value to convert. Kwargs: indent (int): The amount of spaces to use as indentation. """ if isinstance(something, CodeEntity): return something.pretty_str(indent=indent) else: return (' ' * indent) + repr(something)
python
def pretty_str(something, indent=0): """Return a human-readable string representation of an object. Uses `pretty_str` if the given value is an instance of `CodeEntity` and `repr` otherwise. Args: something: Some value to convert. Kwargs: indent (int): The amount of spaces to use as indentation. """ if isinstance(something, CodeEntity): return something.pretty_str(indent=indent) else: return (' ' * indent) + repr(something)
[ "def", "pretty_str", "(", "something", ",", "indent", "=", "0", ")", ":", "if", "isinstance", "(", "something", ",", "CodeEntity", ")", ":", "return", "something", ".", "pretty_str", "(", "indent", "=", "indent", ")", "else", ":", "return", "(", "' '", ...
Return a human-readable string representation of an object. Uses `pretty_str` if the given value is an instance of `CodeEntity` and `repr` otherwise. Args: something: Some value to convert. Kwargs: indent (int): The amount of spaces to use as indentation.
[ "Return", "a", "human", "-", "readable", "string", "representation", "of", "an", "object", "." ]
aa5af3f535b3b506bfc95c107c501fc9c4bcd072
https://github.com/git-afsantos/bonsai/blob/aa5af3f535b3b506bfc95c107c501fc9c4bcd072/bonsai/model.py#L1622-L1637
train
30,843
git-afsantos/bonsai
bonsai/model.py
CodeEntity.walk_preorder
def walk_preorder(self): """Iterates the program tree starting from this object, going down.""" yield self for child in self._children(): for descendant in child.walk_preorder(): yield descendant
python
def walk_preorder(self): """Iterates the program tree starting from this object, going down.""" yield self for child in self._children(): for descendant in child.walk_preorder(): yield descendant
[ "def", "walk_preorder", "(", "self", ")", ":", "yield", "self", "for", "child", "in", "self", ".", "_children", "(", ")", ":", "for", "descendant", "in", "child", ".", "walk_preorder", "(", ")", ":", "yield", "descendant" ]
Iterates the program tree starting from this object, going down.
[ "Iterates", "the", "program", "tree", "starting", "from", "this", "object", "going", "down", "." ]
aa5af3f535b3b506bfc95c107c501fc9c4bcd072
https://github.com/git-afsantos/bonsai/blob/aa5af3f535b3b506bfc95c107c501fc9c4bcd072/bonsai/model.py#L49-L54
train
30,844
git-afsantos/bonsai
bonsai/model.py
CodeEntity._lookup_parent
def _lookup_parent(self, cls): """Lookup a transitive parent object that is an instance of a given class.""" codeobj = self.parent while codeobj is not None and not isinstance(codeobj, cls): codeobj = codeobj.parent return codeobj
python
def _lookup_parent(self, cls): """Lookup a transitive parent object that is an instance of a given class.""" codeobj = self.parent while codeobj is not None and not isinstance(codeobj, cls): codeobj = codeobj.parent return codeobj
[ "def", "_lookup_parent", "(", "self", ",", "cls", ")", ":", "codeobj", "=", "self", ".", "parent", "while", "codeobj", "is", "not", "None", "and", "not", "isinstance", "(", "codeobj", ",", "cls", ")", ":", "codeobj", "=", "codeobj", ".", "parent", "ret...
Lookup a transitive parent object that is an instance of a given class.
[ "Lookup", "a", "transitive", "parent", "object", "that", "is", "an", "instance", "of", "a", "given", "class", "." ]
aa5af3f535b3b506bfc95c107c501fc9c4bcd072
https://github.com/git-afsantos/bonsai/blob/aa5af3f535b3b506bfc95c107c501fc9c4bcd072/bonsai/model.py#L87-L93
train
30,845
git-afsantos/bonsai
bonsai/model.py
CodeEntity.ast_str
def ast_str(self, indent=0): """Return a minimal string to print a tree-like structure. Kwargs: indent (int): The number of indentation levels. """ line = self.line or 0 col = self.column or 0 name = type(self).__name__ spell = getattr(self, 'name', '[no spelling]') result = ' ({})'.format(self.result) if hasattr(self, 'result') else '' prefix = indent * '| ' return '{}[{}:{}] {}{}: {}'.format(prefix, line, col, name, result, spell)
python
def ast_str(self, indent=0): """Return a minimal string to print a tree-like structure. Kwargs: indent (int): The number of indentation levels. """ line = self.line or 0 col = self.column or 0 name = type(self).__name__ spell = getattr(self, 'name', '[no spelling]') result = ' ({})'.format(self.result) if hasattr(self, 'result') else '' prefix = indent * '| ' return '{}[{}:{}] {}{}: {}'.format(prefix, line, col, name, result, spell)
[ "def", "ast_str", "(", "self", ",", "indent", "=", "0", ")", ":", "line", "=", "self", ".", "line", "or", "0", "col", "=", "self", ".", "column", "or", "0", "name", "=", "type", "(", "self", ")", ".", "__name__", "spell", "=", "getattr", "(", "...
Return a minimal string to print a tree-like structure. Kwargs: indent (int): The number of indentation levels.
[ "Return", "a", "minimal", "string", "to", "print", "a", "tree", "-", "like", "structure", "." ]
aa5af3f535b3b506bfc95c107c501fc9c4bcd072
https://github.com/git-afsantos/bonsai/blob/aa5af3f535b3b506bfc95c107c501fc9c4bcd072/bonsai/model.py#L103-L116
train
30,846
git-afsantos/bonsai
bonsai/model.py
CodeVariable.is_local
def is_local(self): """Whether this is a local variable. In general, a variable is *local* if its containing scope is a statement (e.g. a block), or a function, given that the variable is not one of the function's parameters. """ return (isinstance(self.scope, CodeStatement) or (isinstance(self.scope, CodeFunction) and self not in self.scope.parameters))
python
def is_local(self): """Whether this is a local variable. In general, a variable is *local* if its containing scope is a statement (e.g. a block), or a function, given that the variable is not one of the function's parameters. """ return (isinstance(self.scope, CodeStatement) or (isinstance(self.scope, CodeFunction) and self not in self.scope.parameters))
[ "def", "is_local", "(", "self", ")", ":", "return", "(", "isinstance", "(", "self", ".", "scope", ",", "CodeStatement", ")", "or", "(", "isinstance", "(", "self", ".", "scope", ",", "CodeFunction", ")", "and", "self", "not", "in", "self", ".", "scope",...
Whether this is a local variable. In general, a variable is *local* if its containing scope is a statement (e.g. a block), or a function, given that the variable is not one of the function's parameters.
[ "Whether", "this", "is", "a", "local", "variable", "." ]
aa5af3f535b3b506bfc95c107c501fc9c4bcd072
https://github.com/git-afsantos/bonsai/blob/aa5af3f535b3b506bfc95c107c501fc9c4bcd072/bonsai/model.py#L199-L208
train
30,847
git-afsantos/bonsai
bonsai/model.py
CodeVariable.is_parameter
def is_parameter(self): """Whether this is a function parameter.""" return (isinstance(self.scope, CodeFunction) and self in self.scope.parameters)
python
def is_parameter(self): """Whether this is a function parameter.""" return (isinstance(self.scope, CodeFunction) and self in self.scope.parameters)
[ "def", "is_parameter", "(", "self", ")", ":", "return", "(", "isinstance", "(", "self", ".", "scope", ",", "CodeFunction", ")", "and", "self", "in", "self", ".", "scope", ".", "parameters", ")" ]
Whether this is a function parameter.
[ "Whether", "this", "is", "a", "function", "parameter", "." ]
aa5af3f535b3b506bfc95c107c501fc9c4bcd072
https://github.com/git-afsantos/bonsai/blob/aa5af3f535b3b506bfc95c107c501fc9c4bcd072/bonsai/model.py#L220-L223
train
30,848
git-afsantos/bonsai
bonsai/model.py
CodeFunction._afterpass
def _afterpass(self): """Assign a function-local index to each child object and register write operations to variables. This should only be called after the object is fully built. """ if hasattr(self, '_fi'): return fi = 0 for codeobj in self.walk_preorder(): codeobj._fi = fi fi += 1 if isinstance(codeobj, CodeOperator) and codeobj.is_assignment: if codeobj.arguments and isinstance(codeobj.arguments[0], CodeReference): var = codeobj.arguments[0].reference if isinstance(var, CodeVariable): var.writes.append(codeobj)
python
def _afterpass(self): """Assign a function-local index to each child object and register write operations to variables. This should only be called after the object is fully built. """ if hasattr(self, '_fi'): return fi = 0 for codeobj in self.walk_preorder(): codeobj._fi = fi fi += 1 if isinstance(codeobj, CodeOperator) and codeobj.is_assignment: if codeobj.arguments and isinstance(codeobj.arguments[0], CodeReference): var = codeobj.arguments[0].reference if isinstance(var, CodeVariable): var.writes.append(codeobj)
[ "def", "_afterpass", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "'_fi'", ")", ":", "return", "fi", "=", "0", "for", "codeobj", "in", "self", ".", "walk_preorder", "(", ")", ":", "codeobj", ".", "_fi", "=", "fi", "fi", "+=", "1", "i...
Assign a function-local index to each child object and register write operations to variables. This should only be called after the object is fully built.
[ "Assign", "a", "function", "-", "local", "index", "to", "each", "child", "object", "and", "register", "write", "operations", "to", "variables", "." ]
aa5af3f535b3b506bfc95c107c501fc9c4bcd072
https://github.com/git-afsantos/bonsai/blob/aa5af3f535b3b506bfc95c107c501fc9c4bcd072/bonsai/model.py#L307-L324
train
30,849
git-afsantos/bonsai
bonsai/model.py
CodeControlFlow._set_condition
def _set_condition(self, condition): """Set the condition for this control flow structure.""" assert isinstance(condition, CodeExpression.TYPES) self.condition = condition
python
def _set_condition(self, condition): """Set the condition for this control flow structure.""" assert isinstance(condition, CodeExpression.TYPES) self.condition = condition
[ "def", "_set_condition", "(", "self", ",", "condition", ")", ":", "assert", "isinstance", "(", "condition", ",", "CodeExpression", ".", "TYPES", ")", "self", ".", "condition", "=", "condition" ]
Set the condition for this control flow structure.
[ "Set", "the", "condition", "for", "this", "control", "flow", "structure", "." ]
aa5af3f535b3b506bfc95c107c501fc9c4bcd072
https://github.com/git-afsantos/bonsai/blob/aa5af3f535b3b506bfc95c107c501fc9c4bcd072/bonsai/model.py#L1261-L1264
train
30,850
git-afsantos/bonsai
bonsai/model.py
CodeControlFlow._set_body
def _set_body(self, body): """Set the main body for this control flow structure.""" assert isinstance(body, CodeStatement) if isinstance(body, CodeBlock): self.body = body else: self.body._add(body)
python
def _set_body(self, body): """Set the main body for this control flow structure.""" assert isinstance(body, CodeStatement) if isinstance(body, CodeBlock): self.body = body else: self.body._add(body)
[ "def", "_set_body", "(", "self", ",", "body", ")", ":", "assert", "isinstance", "(", "body", ",", "CodeStatement", ")", "if", "isinstance", "(", "body", ",", "CodeBlock", ")", ":", "self", ".", "body", "=", "body", "else", ":", "self", ".", "body", "...
Set the main body for this control flow structure.
[ "Set", "the", "main", "body", "for", "this", "control", "flow", "structure", "." ]
aa5af3f535b3b506bfc95c107c501fc9c4bcd072
https://github.com/git-afsantos/bonsai/blob/aa5af3f535b3b506bfc95c107c501fc9c4bcd072/bonsai/model.py#L1266-L1272
train
30,851
git-afsantos/bonsai
bonsai/model.py
CodeConditional.get_branches
def get_branches(self): """Return a list with the conditional branch and the default branch.""" if self.else_branch: return [self.then_branch, self.else_branch] return [self.then_branch]
python
def get_branches(self): """Return a list with the conditional branch and the default branch.""" if self.else_branch: return [self.then_branch, self.else_branch] return [self.then_branch]
[ "def", "get_branches", "(", "self", ")", ":", "if", "self", ".", "else_branch", ":", "return", "[", "self", ".", "then_branch", ",", "self", ".", "else_branch", "]", "return", "[", "self", ".", "then_branch", "]" ]
Return a list with the conditional branch and the default branch.
[ "Return", "a", "list", "with", "the", "conditional", "branch", "and", "the", "default", "branch", "." ]
aa5af3f535b3b506bfc95c107c501fc9c4bcd072
https://github.com/git-afsantos/bonsai/blob/aa5af3f535b3b506bfc95c107c501fc9c4bcd072/bonsai/model.py#L1350-L1354
train
30,852
rehandalal/therapist
therapist/utils/filesystem.py
list_files
def list_files(path): """Recursively collects a list of files at a path.""" files = [] if os.path.isdir(path): for stats in os.walk(path): for f in stats[2]: files.append(os.path.join(stats[0], f)) elif os.path.isfile(path): files = [path] return files
python
def list_files(path): """Recursively collects a list of files at a path.""" files = [] if os.path.isdir(path): for stats in os.walk(path): for f in stats[2]: files.append(os.path.join(stats[0], f)) elif os.path.isfile(path): files = [path] return files
[ "def", "list_files", "(", "path", ")", ":", "files", "=", "[", "]", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "for", "stats", "in", "os", ".", "walk", "(", "path", ")", ":", "for", "f", "in", "stats", "[", "2", "]", ":", ...
Recursively collects a list of files at a path.
[ "Recursively", "collects", "a", "list", "of", "files", "at", "a", "path", "." ]
1995a7e396eea2ec8685bb32a779a4110b459b1f
https://github.com/rehandalal/therapist/blob/1995a7e396eea2ec8685bb32a779a4110b459b1f/therapist/utils/filesystem.py#L14-L23
train
30,853
rehandalal/therapist
therapist/runner/result.py
ResultCollection.dump
def dump(self): """Returns the results in string format.""" text = '' for result in self.objects: if result.is_failure or result.is_error: text += '\n#{red}#{bright}' text += '{}\n'.format(''.ljust(79, '=')) status = 'FAILED' if result.is_failure else 'ERROR' text += '{}: {}\n'.format(status, result.process) text += '{}\n#{{reset_all}}'.format(''.ljust(79, '=')) if result.output: text += result.output if result.error: if result.output: text += '\n{}\n'.format(''.ljust(79, '-')) text += 'Additional error output:\n' text += '{}\n'.format(''.ljust(79, '-')) text += result.error if not text.endswith('\n'): text += '\n' if self.has_modified_files: text += '\n#{{yellow}}#{{bright}}{}\n'.format(''.ljust(79, '-')) text += 'Modified files:\n' text += '{}\n'.format(''.ljust(79, '-')) for path, modified_by in self.modified_files: text += '#{{reset_all}}{} #{{cyan}}<- {}\n'.format(path, ', '.join(modified_by)) return text
python
def dump(self): """Returns the results in string format.""" text = '' for result in self.objects: if result.is_failure or result.is_error: text += '\n#{red}#{bright}' text += '{}\n'.format(''.ljust(79, '=')) status = 'FAILED' if result.is_failure else 'ERROR' text += '{}: {}\n'.format(status, result.process) text += '{}\n#{{reset_all}}'.format(''.ljust(79, '=')) if result.output: text += result.output if result.error: if result.output: text += '\n{}\n'.format(''.ljust(79, '-')) text += 'Additional error output:\n' text += '{}\n'.format(''.ljust(79, '-')) text += result.error if not text.endswith('\n'): text += '\n' if self.has_modified_files: text += '\n#{{yellow}}#{{bright}}{}\n'.format(''.ljust(79, '-')) text += 'Modified files:\n' text += '{}\n'.format(''.ljust(79, '-')) for path, modified_by in self.modified_files: text += '#{{reset_all}}{} #{{cyan}}<- {}\n'.format(path, ', '.join(modified_by)) return text
[ "def", "dump", "(", "self", ")", ":", "text", "=", "''", "for", "result", "in", "self", ".", "objects", ":", "if", "result", ".", "is_failure", "or", "result", ".", "is_error", ":", "text", "+=", "'\\n#{red}#{bright}'", "text", "+=", "'{}\\n'", ".", "f...
Returns the results in string format.
[ "Returns", "the", "results", "in", "string", "format", "." ]
1995a7e396eea2ec8685bb32a779a4110b459b1f
https://github.com/rehandalal/therapist/blob/1995a7e396eea2ec8685bb32a779a4110b459b1f/therapist/runner/result.py#L145-L178
train
30,854
rehandalal/therapist
therapist/runner/result.py
ResultCollection.dump_junit
def dump_junit(self): """Returns a string containing XML mapped to the JUnit schema.""" testsuites = ElementTree.Element('testsuites', name='therapist', time=str(round(self.execution_time, 2)), tests=str(self.count()), failures=str(self.count(status=Result.FAILURE)), errors=str(self.count(status=Result.ERROR))) for result in self.objects: failures = '1' if result.is_failure else '0' errors = '1' if result.is_error else '0' testsuite = ElementTree.SubElement(testsuites, 'testsuite', id=result.process.name, name=str(result.process), time=str(round(result.execution_time, 2)), tests='1', failures=failures, errors=errors) testcase = ElementTree.SubElement(testsuite, 'testcase', time=str(round(result.execution_time, 2))) testcase.attrib['name'] = result.process.name if result.is_failure or result.is_error: if result.is_failure: element = ElementTree.SubElement(testcase, 'failure', type='failure') else: element = ElementTree.SubElement(testcase, 'error', type='error') if result.error: element.text = result.error else: element.text = result.output if result.output else '' xmlstr = ElementTree.tostring(testsuites, encoding='utf-8').decode('utf-8') return '<?xml version="1.0" encoding="UTF-8"?>\n{}'.format(xmlstr)
python
def dump_junit(self): """Returns a string containing XML mapped to the JUnit schema.""" testsuites = ElementTree.Element('testsuites', name='therapist', time=str(round(self.execution_time, 2)), tests=str(self.count()), failures=str(self.count(status=Result.FAILURE)), errors=str(self.count(status=Result.ERROR))) for result in self.objects: failures = '1' if result.is_failure else '0' errors = '1' if result.is_error else '0' testsuite = ElementTree.SubElement(testsuites, 'testsuite', id=result.process.name, name=str(result.process), time=str(round(result.execution_time, 2)), tests='1', failures=failures, errors=errors) testcase = ElementTree.SubElement(testsuite, 'testcase', time=str(round(result.execution_time, 2))) testcase.attrib['name'] = result.process.name if result.is_failure or result.is_error: if result.is_failure: element = ElementTree.SubElement(testcase, 'failure', type='failure') else: element = ElementTree.SubElement(testcase, 'error', type='error') if result.error: element.text = result.error else: element.text = result.output if result.output else '' xmlstr = ElementTree.tostring(testsuites, encoding='utf-8').decode('utf-8') return '<?xml version="1.0" encoding="UTF-8"?>\n{}'.format(xmlstr)
[ "def", "dump_junit", "(", "self", ")", ":", "testsuites", "=", "ElementTree", ".", "Element", "(", "'testsuites'", ",", "name", "=", "'therapist'", ",", "time", "=", "str", "(", "round", "(", "self", ".", "execution_time", ",", "2", ")", ")", ",", "tes...
Returns a string containing XML mapped to the JUnit schema.
[ "Returns", "a", "string", "containing", "XML", "mapped", "to", "the", "JUnit", "schema", "." ]
1995a7e396eea2ec8685bb32a779a4110b459b1f
https://github.com/rehandalal/therapist/blob/1995a7e396eea2ec8685bb32a779a4110b459b1f/therapist/runner/result.py#L180-L209
train
30,855
rehandalal/therapist
therapist/cli.py
install
def install(**kwargs): """Install the pre-commit hook.""" force = kwargs.get('force') preserve_legacy = kwargs.get('preserve_legacy') colorama.init(strip=kwargs.get('no_color')) stdout = subprocess.check_output('which therapist', shell=True) therapist_bin = stdout.decode('utf-8').split()[0] git_dir = current_git_dir() if git_dir is None: output(NOT_GIT_REPO_MSG) exit(1) hook_options = { 'fix': '--fix' if kwargs.get('fix') else '', 'stage_modified_files': '' if kwargs.get('no_stage_modified_files') else '--stage-modified-files', 'therapist_bin': therapist_bin, } srchook_path = os.path.join(BASE_DIR, 'hooks', 'pre-commit-template') with open(srchook_path, 'r') as f: srchook = f.read() srchook_hash = hash_hook(srchook_path, hook_options) dsthook_path = os.path.join(git_dir, 'hooks', 'pre-commit') if os.path.isfile(dsthook_path): dsthook_hash = identify_hook(dsthook_path) if dsthook_hash: if dsthook_hash == srchook_hash: output(HOOK_ALREADY_INSTALLED_MSG) exit(0) else: if not force and not preserve_legacy: print(EXISTING_HOOK_MSG) preserve_legacy = click.confirm(CONFIRM_PRESERVE_LEGACY_HOOK_MSG, default=True) if preserve_legacy: output(COPYING_HOOK_MSG, end='') shutil.copy2(dsthook_path, '{}.legacy'.format(dsthook_path)) output(DONE_COPYING_HOOK_MSG) elif not force: if not click.confirm(CONFIRM_REPLACE_HOOK_MSG, default=False): output(INSTALL_ABORTED_MSG) exit(1) output(INSTALLING_HOOK_MSG, end='') with open(dsthook_path, 'w+') as f: srchook = srchook.replace('%hash%', srchook_hash) for k, v in iteritems(hook_options): srchook = srchook.replace('%{}%'.format(k), v) f.write(srchook) os.chmod(dsthook_path, 0o775) output(DONE_INSTALLING_HOOK_MSG)
python
def install(**kwargs): """Install the pre-commit hook.""" force = kwargs.get('force') preserve_legacy = kwargs.get('preserve_legacy') colorama.init(strip=kwargs.get('no_color')) stdout = subprocess.check_output('which therapist', shell=True) therapist_bin = stdout.decode('utf-8').split()[0] git_dir = current_git_dir() if git_dir is None: output(NOT_GIT_REPO_MSG) exit(1) hook_options = { 'fix': '--fix' if kwargs.get('fix') else '', 'stage_modified_files': '' if kwargs.get('no_stage_modified_files') else '--stage-modified-files', 'therapist_bin': therapist_bin, } srchook_path = os.path.join(BASE_DIR, 'hooks', 'pre-commit-template') with open(srchook_path, 'r') as f: srchook = f.read() srchook_hash = hash_hook(srchook_path, hook_options) dsthook_path = os.path.join(git_dir, 'hooks', 'pre-commit') if os.path.isfile(dsthook_path): dsthook_hash = identify_hook(dsthook_path) if dsthook_hash: if dsthook_hash == srchook_hash: output(HOOK_ALREADY_INSTALLED_MSG) exit(0) else: if not force and not preserve_legacy: print(EXISTING_HOOK_MSG) preserve_legacy = click.confirm(CONFIRM_PRESERVE_LEGACY_HOOK_MSG, default=True) if preserve_legacy: output(COPYING_HOOK_MSG, end='') shutil.copy2(dsthook_path, '{}.legacy'.format(dsthook_path)) output(DONE_COPYING_HOOK_MSG) elif not force: if not click.confirm(CONFIRM_REPLACE_HOOK_MSG, default=False): output(INSTALL_ABORTED_MSG) exit(1) output(INSTALLING_HOOK_MSG, end='') with open(dsthook_path, 'w+') as f: srchook = srchook.replace('%hash%', srchook_hash) for k, v in iteritems(hook_options): srchook = srchook.replace('%{}%'.format(k), v) f.write(srchook) os.chmod(dsthook_path, 0o775) output(DONE_INSTALLING_HOOK_MSG)
[ "def", "install", "(", "*", "*", "kwargs", ")", ":", "force", "=", "kwargs", ".", "get", "(", "'force'", ")", "preserve_legacy", "=", "kwargs", ".", "get", "(", "'preserve_legacy'", ")", "colorama", ".", "init", "(", "strip", "=", "kwargs", ".", "get",...
Install the pre-commit hook.
[ "Install", "the", "pre", "-", "commit", "hook", "." ]
1995a7e396eea2ec8685bb32a779a4110b459b1f
https://github.com/rehandalal/therapist/blob/1995a7e396eea2ec8685bb32a779a4110b459b1f/therapist/cli.py#L80-L136
train
30,856
rehandalal/therapist
therapist/cli.py
uninstall
def uninstall(**kwargs): """Uninstall the current pre-commit hook.""" force = kwargs.get('force') restore_legacy = kwargs.get('restore_legacy') colorama.init(strip=kwargs.get('no_color')) git_dir = current_git_dir() if git_dir is None: output(NOT_GIT_REPO_MSG) exit(1) hook_path = os.path.join(git_dir, 'hooks', 'pre-commit') if not os.path.isfile(hook_path): output(NO_HOOK_INSTALLED_MSG) exit(0) hook_hash = identify_hook(hook_path) if hook_hash: if not force: if not click.confirm(CONFIRM_UNINSTALL_HOOK_MSG, default=False): output(UNINSTALL_ABORTED_MSG) exit(1) else: output(CURRENT_HOOK_NOT_THERAPIST_MSG) exit(1) legacy_hook_path = os.path.join(git_dir, 'hooks', 'pre-commit.legacy') if os.path.isfile(legacy_hook_path): if not force and not restore_legacy: output(LEGACY_HOOK_EXISTS_MSG) restore_legacy = click.confirm(CONFIRM_RESTORE_LEGACY_HOOK_MSG, default=True) if restore_legacy: output(COPYING_LEGACY_HOOK_MSG, end='') shutil.copy2(legacy_hook_path, hook_path) os.remove(legacy_hook_path) output(DONE_COPYING_LEGACY_HOOK_MSG) exit(0) else: if force or click.confirm('Would you like to remove the legacy hook?', default=False): output(REMOVING_LEGACY_HOOK_MSG, end='') os.remove(legacy_hook_path) output(DONE_REMOVING_LEGACY_HOOK_MSG) output(UNINSTALLING_HOOK_MSG, end='') os.remove(hook_path) output(DONE_UNINSTALLING_HOOK_MSG)
python
def uninstall(**kwargs): """Uninstall the current pre-commit hook.""" force = kwargs.get('force') restore_legacy = kwargs.get('restore_legacy') colorama.init(strip=kwargs.get('no_color')) git_dir = current_git_dir() if git_dir is None: output(NOT_GIT_REPO_MSG) exit(1) hook_path = os.path.join(git_dir, 'hooks', 'pre-commit') if not os.path.isfile(hook_path): output(NO_HOOK_INSTALLED_MSG) exit(0) hook_hash = identify_hook(hook_path) if hook_hash: if not force: if not click.confirm(CONFIRM_UNINSTALL_HOOK_MSG, default=False): output(UNINSTALL_ABORTED_MSG) exit(1) else: output(CURRENT_HOOK_NOT_THERAPIST_MSG) exit(1) legacy_hook_path = os.path.join(git_dir, 'hooks', 'pre-commit.legacy') if os.path.isfile(legacy_hook_path): if not force and not restore_legacy: output(LEGACY_HOOK_EXISTS_MSG) restore_legacy = click.confirm(CONFIRM_RESTORE_LEGACY_HOOK_MSG, default=True) if restore_legacy: output(COPYING_LEGACY_HOOK_MSG, end='') shutil.copy2(legacy_hook_path, hook_path) os.remove(legacy_hook_path) output(DONE_COPYING_LEGACY_HOOK_MSG) exit(0) else: if force or click.confirm('Would you like to remove the legacy hook?', default=False): output(REMOVING_LEGACY_HOOK_MSG, end='') os.remove(legacy_hook_path) output(DONE_REMOVING_LEGACY_HOOK_MSG) output(UNINSTALLING_HOOK_MSG, end='') os.remove(hook_path) output(DONE_UNINSTALLING_HOOK_MSG)
[ "def", "uninstall", "(", "*", "*", "kwargs", ")", ":", "force", "=", "kwargs", ".", "get", "(", "'force'", ")", "restore_legacy", "=", "kwargs", ".", "get", "(", "'restore_legacy'", ")", "colorama", ".", "init", "(", "strip", "=", "kwargs", ".", "get",...
Uninstall the current pre-commit hook.
[ "Uninstall", "the", "current", "pre", "-", "commit", "hook", "." ]
1995a7e396eea2ec8685bb32a779a4110b459b1f
https://github.com/rehandalal/therapist/blob/1995a7e396eea2ec8685bb32a779a4110b459b1f/therapist/cli.py#L145-L195
train
30,857
rehandalal/therapist
therapist/cli.py
run
def run(**kwargs): """Run the Therapist suite.""" paths = kwargs.pop('paths', ()) action = kwargs.pop('action') plugin = kwargs.pop('plugin') junit_xml = kwargs.pop('junit_xml') use_tracked_files = kwargs.pop('use_tracked_files') quiet = kwargs.pop('quiet') colorama.init(strip=kwargs.pop('no_color')) git_dir = current_git_dir() if git_dir is None: output(NOT_GIT_REPO_MSG) exit(1) repo_root = os.path.dirname(git_dir) files = [] if paths: # We want to look at files in their current state if paths are passed through kwargs['include_unstaged_changes'] = True # If paths were provided get all the files for each path for path in paths: for f in list_files(path): f = os.path.relpath(f, repo_root) if not f.startswith('..'): # Don't include files outside the repo root. files.append(f) elif use_tracked_files: # If the use tracked files flag was passed, get a list of all the tracked files out, err, code = git.ls_files() files = out.splitlines() if kwargs.get('include_untracked'): out, err, code = git.ls_files(o=True, exclude_standard=True) files += out.splitlines() if files or paths: kwargs['files'] = files config = get_config(repo_root) runner = Runner(config.cwd, **kwargs) results = ResultCollection() if runner.unstaged_changes and not quiet: output(UNSTAGED_CHANGES_MSG, end='\n\n') processes = list(config.actions) + list(config.plugins) processes.sort(key=lambda x: x.name) # Sort the list of processes for consistent results if plugin: try: processes = [config.plugins.get(plugin)] except config.plugins.DoesNotExist as e: output('{}\nAvailable plugins:'.format(e.message)) for p in config.plugins: output(p.name) exit(1) if action: try: processes = [config.actions.get(action)] except config.actions.DoesNotExist as e: output('{}\nAvailable actions:'.format(e.message)) for a in config.actions: output(a.name) exit(1) for process in processes: result, message = runner.run_process(process) results.append(result) if not quiet: output(message) if junit_xml: with open(junit_xml, 'w+') 
as f: f.write('{}'.format(results.dump_junit())) if not quiet: output(results.dump()) output('#{{bright}}{}\nCompleted in: {}s'.format(''.ljust(79, '-'), round(results.execution_time, 2))) if results.has_error: exit(1) elif results.has_failure: exit(2)
python
def run(**kwargs): """Run the Therapist suite.""" paths = kwargs.pop('paths', ()) action = kwargs.pop('action') plugin = kwargs.pop('plugin') junit_xml = kwargs.pop('junit_xml') use_tracked_files = kwargs.pop('use_tracked_files') quiet = kwargs.pop('quiet') colorama.init(strip=kwargs.pop('no_color')) git_dir = current_git_dir() if git_dir is None: output(NOT_GIT_REPO_MSG) exit(1) repo_root = os.path.dirname(git_dir) files = [] if paths: # We want to look at files in their current state if paths are passed through kwargs['include_unstaged_changes'] = True # If paths were provided get all the files for each path for path in paths: for f in list_files(path): f = os.path.relpath(f, repo_root) if not f.startswith('..'): # Don't include files outside the repo root. files.append(f) elif use_tracked_files: # If the use tracked files flag was passed, get a list of all the tracked files out, err, code = git.ls_files() files = out.splitlines() if kwargs.get('include_untracked'): out, err, code = git.ls_files(o=True, exclude_standard=True) files += out.splitlines() if files or paths: kwargs['files'] = files config = get_config(repo_root) runner = Runner(config.cwd, **kwargs) results = ResultCollection() if runner.unstaged_changes and not quiet: output(UNSTAGED_CHANGES_MSG, end='\n\n') processes = list(config.actions) + list(config.plugins) processes.sort(key=lambda x: x.name) # Sort the list of processes for consistent results if plugin: try: processes = [config.plugins.get(plugin)] except config.plugins.DoesNotExist as e: output('{}\nAvailable plugins:'.format(e.message)) for p in config.plugins: output(p.name) exit(1) if action: try: processes = [config.actions.get(action)] except config.actions.DoesNotExist as e: output('{}\nAvailable actions:'.format(e.message)) for a in config.actions: output(a.name) exit(1) for process in processes: result, message = runner.run_process(process) results.append(result) if not quiet: output(message) if junit_xml: with open(junit_xml, 'w+') 
as f: f.write('{}'.format(results.dump_junit())) if not quiet: output(results.dump()) output('#{{bright}}{}\nCompleted in: {}s'.format(''.ljust(79, '-'), round(results.execution_time, 2))) if results.has_error: exit(1) elif results.has_failure: exit(2)
[ "def", "run", "(", "*", "*", "kwargs", ")", ":", "paths", "=", "kwargs", ".", "pop", "(", "'paths'", ",", "(", ")", ")", "action", "=", "kwargs", ".", "pop", "(", "'action'", ")", "plugin", "=", "kwargs", ".", "pop", "(", "'plugin'", ")", "junit_...
Run the Therapist suite.
[ "Run", "the", "Therapist", "suite", "." ]
1995a7e396eea2ec8685bb32a779a4110b459b1f
https://github.com/rehandalal/therapist/blob/1995a7e396eea2ec8685bb32a779a4110b459b1f/therapist/cli.py#L211-L301
train
30,858
rehandalal/therapist
therapist/cli.py
use
def use(ctx, shortcut): """Use a shortcut.""" git_dir = current_git_dir() if git_dir is None: output(NOT_GIT_REPO_MSG) exit(1) repo_root = os.path.dirname(git_dir) config = get_config(repo_root) try: use_shortcut = config.shortcuts.get(shortcut) while use_shortcut.extends is not None: base = config.shortcuts.get(use_shortcut.extends) use_shortcut = base.extend(use_shortcut) except config.shortcuts.DoesNotExist as err: output('{}\nAvailable shortcuts:'.format(err.message)) for s in config.shortcuts: output(s.name) exit(1) else: options = use_shortcut.options for flag in use_shortcut.flags: options[flag.replace('-', '_')] = True options_string = '' for k, v in sorted(iteritems(options)): options_string += ' --{}'.format(k.replace('_', '-')) if v is not True: options_string += ' {}'.format(v) output('#{{dim}}$ therapist run{}\n'.format(options_string)) ctx.invoke(run, **options)
python
def use(ctx, shortcut): """Use a shortcut.""" git_dir = current_git_dir() if git_dir is None: output(NOT_GIT_REPO_MSG) exit(1) repo_root = os.path.dirname(git_dir) config = get_config(repo_root) try: use_shortcut = config.shortcuts.get(shortcut) while use_shortcut.extends is not None: base = config.shortcuts.get(use_shortcut.extends) use_shortcut = base.extend(use_shortcut) except config.shortcuts.DoesNotExist as err: output('{}\nAvailable shortcuts:'.format(err.message)) for s in config.shortcuts: output(s.name) exit(1) else: options = use_shortcut.options for flag in use_shortcut.flags: options[flag.replace('-', '_')] = True options_string = '' for k, v in sorted(iteritems(options)): options_string += ' --{}'.format(k.replace('_', '-')) if v is not True: options_string += ' {}'.format(v) output('#{{dim}}$ therapist run{}\n'.format(options_string)) ctx.invoke(run, **options)
[ "def", "use", "(", "ctx", ",", "shortcut", ")", ":", "git_dir", "=", "current_git_dir", "(", ")", "if", "git_dir", "is", "None", ":", "output", "(", "NOT_GIT_REPO_MSG", ")", "exit", "(", "1", ")", "repo_root", "=", "os", ".", "path", ".", "dirname", ...
Use a shortcut.
[ "Use", "a", "shortcut", "." ]
1995a7e396eea2ec8685bb32a779a4110b459b1f
https://github.com/rehandalal/therapist/blob/1995a7e396eea2ec8685bb32a779a4110b459b1f/therapist/cli.py#L307-L343
train
30,859
rehandalal/therapist
therapist/utils/hook.py
identify_hook
def identify_hook(path): """Verify that the file at path is the therapist hook and return the hash""" with open(path, 'r') as f: f.readline() # Discard the shebang line version_line = f.readline() if version_line.startswith('# THERAPIST'): return version_line.split()[2]
python
def identify_hook(path): """Verify that the file at path is the therapist hook and return the hash""" with open(path, 'r') as f: f.readline() # Discard the shebang line version_line = f.readline() if version_line.startswith('# THERAPIST'): return version_line.split()[2]
[ "def", "identify_hook", "(", "path", ")", ":", "with", "open", "(", "path", ",", "'r'", ")", "as", "f", ":", "f", ".", "readline", "(", ")", "# Discard the shebang line", "version_line", "=", "f", ".", "readline", "(", ")", "if", "version_line", ".", "...
Verify that the file at path is the therapist hook and return the hash
[ "Verify", "that", "the", "file", "at", "path", "is", "the", "therapist", "hook", "and", "return", "the", "hash" ]
1995a7e396eea2ec8685bb32a779a4110b459b1f
https://github.com/rehandalal/therapist/blob/1995a7e396eea2ec8685bb32a779a4110b459b1f/therapist/utils/hook.py#L6-L12
train
30,860
rehandalal/therapist
therapist/utils/hook.py
hash_hook
def hash_hook(path, options): """Hash a hook file""" with open(path, 'r') as f: data = f.read() for key in sorted(iterkeys(options)): data += '\n#{}={}'.format(key, options.get(key)) return hashlib.md5(data.encode()).hexdigest()
python
def hash_hook(path, options): """Hash a hook file""" with open(path, 'r') as f: data = f.read() for key in sorted(iterkeys(options)): data += '\n#{}={}'.format(key, options.get(key)) return hashlib.md5(data.encode()).hexdigest()
[ "def", "hash_hook", "(", "path", ",", "options", ")", ":", "with", "open", "(", "path", ",", "'r'", ")", "as", "f", ":", "data", "=", "f", ".", "read", "(", ")", "for", "key", "in", "sorted", "(", "iterkeys", "(", "options", ")", ")", ":", "dat...
Hash a hook file
[ "Hash", "a", "hook", "file" ]
1995a7e396eea2ec8685bb32a779a4110b459b1f
https://github.com/rehandalal/therapist/blob/1995a7e396eea2ec8685bb32a779a4110b459b1f/therapist/utils/hook.py#L15-L21
train
30,861
rehandalal/therapist
therapist/runner/runner.py
Runner.run_process
def run_process(self, process): """Runs a single action.""" message = u'#{bright}' message += u'{} '.format(str(process)[:68]).ljust(69, '.') stashed = False if self.unstaged_changes and not self.include_unstaged_changes: out, err, code = self.git.stash(keep_index=True, quiet=True) stashed = code == 0 try: result = process(files=self.files, cwd=self.cwd, fix=self.fix) # Check for modified files out, err, code = self.git.status(porcelain=True, untracked_files='no') for line in out.splitlines(): file_status = Status(line) # Make sure the file is one of the files that was processed if file_status.path in self.files and file_status.is_modified: mtime = os.path.getmtime(file_status.path) if os.path.exists(file_status.path) else 0 if mtime > self.file_mtimes.get(file_status.path, 0): self.file_mtimes[file_status.path] = mtime result.add_modified_file(file_status.path) if self.stage_modified_files: self.git.add(file_status.path) except: # noqa: E722 raise finally: if stashed: self.git.reset(hard=True, quiet=True) self.git.stash.pop(index=True, quiet=True) if result.is_success: message += u' #{green}[SUCCESS]' elif result.is_failure: message += u' #{red}[FAILURE]' elif result.is_skip: message += u' #{cyan}[SKIPPED]' elif result.is_error: message += u' #{red}[ERROR!!]' return result, message
python
def run_process(self, process): """Runs a single action.""" message = u'#{bright}' message += u'{} '.format(str(process)[:68]).ljust(69, '.') stashed = False if self.unstaged_changes and not self.include_unstaged_changes: out, err, code = self.git.stash(keep_index=True, quiet=True) stashed = code == 0 try: result = process(files=self.files, cwd=self.cwd, fix=self.fix) # Check for modified files out, err, code = self.git.status(porcelain=True, untracked_files='no') for line in out.splitlines(): file_status = Status(line) # Make sure the file is one of the files that was processed if file_status.path in self.files and file_status.is_modified: mtime = os.path.getmtime(file_status.path) if os.path.exists(file_status.path) else 0 if mtime > self.file_mtimes.get(file_status.path, 0): self.file_mtimes[file_status.path] = mtime result.add_modified_file(file_status.path) if self.stage_modified_files: self.git.add(file_status.path) except: # noqa: E722 raise finally: if stashed: self.git.reset(hard=True, quiet=True) self.git.stash.pop(index=True, quiet=True) if result.is_success: message += u' #{green}[SUCCESS]' elif result.is_failure: message += u' #{red}[FAILURE]' elif result.is_skip: message += u' #{cyan}[SKIPPED]' elif result.is_error: message += u' #{red}[ERROR!!]' return result, message
[ "def", "run_process", "(", "self", ",", "process", ")", ":", "message", "=", "u'#{bright}'", "message", "+=", "u'{} '", ".", "format", "(", "str", "(", "process", ")", "[", ":", "68", "]", ")", ".", "ljust", "(", "69", ",", "'.'", ")", "stashed", "...
Runs a single action.
[ "Runs", "a", "single", "action", "." ]
1995a7e396eea2ec8685bb32a779a4110b459b1f
https://github.com/rehandalal/therapist/blob/1995a7e396eea2ec8685bb32a779a4110b459b1f/therapist/runner/runner.py#L47-L90
train
30,862
flyte/upnpclient
upnpclient/soap.py
SOAP._extract_upnperror
def _extract_upnperror(self, err_xml): """ Extract the error code and error description from an error returned by the device. """ nsmap = {'s': list(err_xml.nsmap.values())[0]} fault_str = err_xml.findtext( 's:Body/s:Fault/faultstring', namespaces=nsmap) try: err = err_xml.xpath( 's:Body/s:Fault/detail/*[name()="%s"]' % fault_str, namespaces=nsmap)[0] except IndexError: msg = 'Tag with name of %r was not found in the error response.' % fault_str self._log.debug( msg + '\n' + etree.tostring(err_xml, pretty_print=True).decode('utf8')) raise SOAPProtocolError(msg) err_code = err.findtext('errorCode', namespaces=err.nsmap) err_desc = err.findtext('errorDescription', namespaces=err.nsmap) if err_code is None or err_desc is None: msg = 'Tags errorCode or errorDescription were not found in the error response.' self._log.debug( msg + '\n' + etree.tostring(err_xml, pretty_print=True).decode('utf8')) raise SOAPProtocolError(msg) return int(err_code), err_desc
python
def _extract_upnperror(self, err_xml): """ Extract the error code and error description from an error returned by the device. """ nsmap = {'s': list(err_xml.nsmap.values())[0]} fault_str = err_xml.findtext( 's:Body/s:Fault/faultstring', namespaces=nsmap) try: err = err_xml.xpath( 's:Body/s:Fault/detail/*[name()="%s"]' % fault_str, namespaces=nsmap)[0] except IndexError: msg = 'Tag with name of %r was not found in the error response.' % fault_str self._log.debug( msg + '\n' + etree.tostring(err_xml, pretty_print=True).decode('utf8')) raise SOAPProtocolError(msg) err_code = err.findtext('errorCode', namespaces=err.nsmap) err_desc = err.findtext('errorDescription', namespaces=err.nsmap) if err_code is None or err_desc is None: msg = 'Tags errorCode or errorDescription were not found in the error response.' self._log.debug( msg + '\n' + etree.tostring(err_xml, pretty_print=True).decode('utf8')) raise SOAPProtocolError(msg) return int(err_code), err_desc
[ "def", "_extract_upnperror", "(", "self", ",", "err_xml", ")", ":", "nsmap", "=", "{", "'s'", ":", "list", "(", "err_xml", ".", "nsmap", ".", "values", "(", ")", ")", "[", "0", "]", "}", "fault_str", "=", "err_xml", ".", "findtext", "(", "'s:Body/s:F...
Extract the error code and error description from an error returned by the device.
[ "Extract", "the", "error", "code", "and", "error", "description", "from", "an", "error", "returned", "by", "the", "device", "." ]
5529b950df33c0eaf0c24a9a307cf00fe627d0ad
https://github.com/flyte/upnpclient/blob/5529b950df33c0eaf0c24a9a307cf00fe627d0ad/upnpclient/soap.py#L35-L59
train
30,863
flyte/upnpclient
upnpclient/soap.py
SOAP._remove_extraneous_xml_declarations
def _remove_extraneous_xml_declarations(xml_str): """ Sometimes devices return XML with more than one XML declaration in, such as when returning their own XML config files. This removes the extra ones and preserves the first one. """ xml_declaration = '' if xml_str.startswith('<?xml'): xml_declaration, xml_str = xml_str.split('?>', maxsplit=1) xml_declaration += '?>' xml_str = re.sub(r'<\?xml.*?\?>', '', xml_str, flags=re.I) return xml_declaration + xml_str
python
def _remove_extraneous_xml_declarations(xml_str): """ Sometimes devices return XML with more than one XML declaration in, such as when returning their own XML config files. This removes the extra ones and preserves the first one. """ xml_declaration = '' if xml_str.startswith('<?xml'): xml_declaration, xml_str = xml_str.split('?>', maxsplit=1) xml_declaration += '?>' xml_str = re.sub(r'<\?xml.*?\?>', '', xml_str, flags=re.I) return xml_declaration + xml_str
[ "def", "_remove_extraneous_xml_declarations", "(", "xml_str", ")", ":", "xml_declaration", "=", "''", "if", "xml_str", ".", "startswith", "(", "'<?xml'", ")", ":", "xml_declaration", ",", "xml_str", "=", "xml_str", ".", "split", "(", "'?>'", ",", "maxsplit", "...
Sometimes devices return XML with more than one XML declaration in, such as when returning their own XML config files. This removes the extra ones and preserves the first one.
[ "Sometimes", "devices", "return", "XML", "with", "more", "than", "one", "XML", "declaration", "in", "such", "as", "when", "returning", "their", "own", "XML", "config", "files", ".", "This", "removes", "the", "extra", "ones", "and", "preserves", "the", "first...
5529b950df33c0eaf0c24a9a307cf00fe627d0ad
https://github.com/flyte/upnpclient/blob/5529b950df33c0eaf0c24a9a307cf00fe627d0ad/upnpclient/soap.py#L62-L72
train
30,864
flyte/upnpclient
upnpclient/soap.py
SOAP.call
def call(self, action_name, arg_in=None, http_auth=None, http_headers=None): """ Construct the XML and make the call to the device. Parse the response values into a dict. """ if arg_in is None: arg_in = {} soap_env = '{%s}' % NS_SOAP_ENV m = '{%s}' % self.service_type root = etree.Element(soap_env+'Envelope', nsmap={'SOAP-ENV': NS_SOAP_ENV}) root.attrib[soap_env+'encodingStyle'] = ENCODING_STYLE body = etree.SubElement(root, soap_env+'Body') action = etree.SubElement(body, m+action_name, nsmap={'m': self.service_type}) for key, value in arg_in.items(): etree.SubElement(action, key).text = str(value) body = etree.tostring(root, encoding=ENCODING, xml_declaration=True) headers = { 'SOAPAction': '"%s#%s"' % (self.service_type, action_name), 'Host': self._host, 'Content-Type': 'text/xml', 'Content-Length': str(len(body)), } headers.update(http_headers or {}) try: resp = requests.post( self.url, body, headers=headers, timeout=SOAP_TIMEOUT, auth=http_auth ) resp.raise_for_status() except requests.exceptions.HTTPError as exc: # If the body of the error response contains XML then it should be a UPnP error, # otherwise reraise the HTTPError. try: err_xml = etree.fromstring(exc.response.content) except etree.XMLSyntaxError: raise exc raise SOAPError(*self._extract_upnperror(err_xml)) xml_str = resp.content.strip() try: xml = etree.fromstring(xml_str) except etree.XMLSyntaxError: # Try removing any extra XML declarations in case there are more than one. # This sometimes happens when a device sends its own XML config files. xml = etree.fromstring(self._remove_extraneous_xml_declarations(xml_str)) except ValueError: # This can occur when requests returns a `str` (unicode) but there's also an XML # declaration, which lxml doesn't like. 
xml = etree.fromstring(xml_str.encode('utf8')) response = xml.find(".//{%s}%sResponse" % (self.service_type, action_name)) if response is None: msg = ('Returned XML did not include an element which matches namespace %r and tag name' ' \'%sResponse\'.' % (self.service_type, action_name)) self._log.debug(msg + '\n' + etree.tostring(xml, pretty_print=True).decode('utf8')) raise SOAPProtocolError(msg) # Sometimes devices return XML strings as their argument values without escaping them with # CDATA. This checks to see if the argument has been parsed as XML and un-parses it if so. ret = {} for arg in response.getchildren(): children = arg.getchildren() if children: ret[arg.tag] = b"\n".join(etree.tostring(x) for x in children) else: ret[arg.tag] = arg.text return ret
python
def call(self, action_name, arg_in=None, http_auth=None, http_headers=None): """ Construct the XML and make the call to the device. Parse the response values into a dict. """ if arg_in is None: arg_in = {} soap_env = '{%s}' % NS_SOAP_ENV m = '{%s}' % self.service_type root = etree.Element(soap_env+'Envelope', nsmap={'SOAP-ENV': NS_SOAP_ENV}) root.attrib[soap_env+'encodingStyle'] = ENCODING_STYLE body = etree.SubElement(root, soap_env+'Body') action = etree.SubElement(body, m+action_name, nsmap={'m': self.service_type}) for key, value in arg_in.items(): etree.SubElement(action, key).text = str(value) body = etree.tostring(root, encoding=ENCODING, xml_declaration=True) headers = { 'SOAPAction': '"%s#%s"' % (self.service_type, action_name), 'Host': self._host, 'Content-Type': 'text/xml', 'Content-Length': str(len(body)), } headers.update(http_headers or {}) try: resp = requests.post( self.url, body, headers=headers, timeout=SOAP_TIMEOUT, auth=http_auth ) resp.raise_for_status() except requests.exceptions.HTTPError as exc: # If the body of the error response contains XML then it should be a UPnP error, # otherwise reraise the HTTPError. try: err_xml = etree.fromstring(exc.response.content) except etree.XMLSyntaxError: raise exc raise SOAPError(*self._extract_upnperror(err_xml)) xml_str = resp.content.strip() try: xml = etree.fromstring(xml_str) except etree.XMLSyntaxError: # Try removing any extra XML declarations in case there are more than one. # This sometimes happens when a device sends its own XML config files. xml = etree.fromstring(self._remove_extraneous_xml_declarations(xml_str)) except ValueError: # This can occur when requests returns a `str` (unicode) but there's also an XML # declaration, which lxml doesn't like. 
xml = etree.fromstring(xml_str.encode('utf8')) response = xml.find(".//{%s}%sResponse" % (self.service_type, action_name)) if response is None: msg = ('Returned XML did not include an element which matches namespace %r and tag name' ' \'%sResponse\'.' % (self.service_type, action_name)) self._log.debug(msg + '\n' + etree.tostring(xml, pretty_print=True).decode('utf8')) raise SOAPProtocolError(msg) # Sometimes devices return XML strings as their argument values without escaping them with # CDATA. This checks to see if the argument has been parsed as XML and un-parses it if so. ret = {} for arg in response.getchildren(): children = arg.getchildren() if children: ret[arg.tag] = b"\n".join(etree.tostring(x) for x in children) else: ret[arg.tag] = arg.text return ret
[ "def", "call", "(", "self", ",", "action_name", ",", "arg_in", "=", "None", ",", "http_auth", "=", "None", ",", "http_headers", "=", "None", ")", ":", "if", "arg_in", "is", "None", ":", "arg_in", "=", "{", "}", "soap_env", "=", "'{%s}'", "%", "NS_SOA...
Construct the XML and make the call to the device. Parse the response values into a dict.
[ "Construct", "the", "XML", "and", "make", "the", "call", "to", "the", "device", ".", "Parse", "the", "response", "values", "into", "a", "dict", "." ]
5529b950df33c0eaf0c24a9a307cf00fe627d0ad
https://github.com/flyte/upnpclient/blob/5529b950df33c0eaf0c24a9a307cf00fe627d0ad/upnpclient/soap.py#L74-L146
train
30,865
flyte/upnpclient
upnpclient/upnp.py
Device._read_services
def _read_services(self): """ Read the control XML file and populate self.services with a list of services in the form of Service class instances. """ # The double slash in the XPath is deliberate, as services can be # listed in two places (Section 2.3 of uPNP device architecture v1.1) for node in self._findall('device//serviceList/service'): findtext = partial(node.findtext, namespaces=self._root_xml.nsmap) svc = Service( self, self._url_base, findtext('serviceType'), findtext('serviceId'), findtext('controlURL'), findtext('SCPDURL'), findtext('eventSubURL') ) self._log.debug( '%s: Service %r at %r', self.device_name, svc.service_type, svc.scpd_url) self.services.append(svc) self.service_map[svc.name] = svc
python
def _read_services(self): """ Read the control XML file and populate self.services with a list of services in the form of Service class instances. """ # The double slash in the XPath is deliberate, as services can be # listed in two places (Section 2.3 of uPNP device architecture v1.1) for node in self._findall('device//serviceList/service'): findtext = partial(node.findtext, namespaces=self._root_xml.nsmap) svc = Service( self, self._url_base, findtext('serviceType'), findtext('serviceId'), findtext('controlURL'), findtext('SCPDURL'), findtext('eventSubURL') ) self._log.debug( '%s: Service %r at %r', self.device_name, svc.service_type, svc.scpd_url) self.services.append(svc) self.service_map[svc.name] = svc
[ "def", "_read_services", "(", "self", ")", ":", "# The double slash in the XPath is deliberate, as services can be", "# listed in two places (Section 2.3 of uPNP device architecture v1.1)", "for", "node", "in", "self", ".", "_findall", "(", "'device//serviceList/service'", ")", ":"...
Read the control XML file and populate self.services with a list of services in the form of Service class instances.
[ "Read", "the", "control", "XML", "file", "and", "populate", "self", ".", "services", "with", "a", "list", "of", "services", "in", "the", "form", "of", "Service", "class", "instances", "." ]
5529b950df33c0eaf0c24a9a307cf00fe627d0ad
https://github.com/flyte/upnpclient/blob/5529b950df33c0eaf0c24a9a307cf00fe627d0ad/upnpclient/upnp.py#L164-L185
train
30,866
flyte/upnpclient
upnpclient/upnp.py
Device.find_action
def find_action(self, action_name): """Find an action by name. Convenience method that searches through all the services offered by the Server for an action and returns an Action instance. If the action is not found, returns None. If multiple actions with the same name are found it returns the first one. """ for service in self.services: action = service.find_action(action_name) if action is not None: return action
python
def find_action(self, action_name): """Find an action by name. Convenience method that searches through all the services offered by the Server for an action and returns an Action instance. If the action is not found, returns None. If multiple actions with the same name are found it returns the first one. """ for service in self.services: action = service.find_action(action_name) if action is not None: return action
[ "def", "find_action", "(", "self", ",", "action_name", ")", ":", "for", "service", "in", "self", ".", "services", ":", "action", "=", "service", ".", "find_action", "(", "action_name", ")", "if", "action", "is", "not", "None", ":", "return", "action" ]
Find an action by name. Convenience method that searches through all the services offered by the Server for an action and returns an Action instance. If the action is not found, returns None. If multiple actions with the same name are found it returns the first one.
[ "Find", "an", "action", "by", "name", ".", "Convenience", "method", "that", "searches", "through", "all", "the", "services", "offered", "by", "the", "Server", "for", "an", "action", "and", "returns", "an", "Action", "instance", ".", "If", "the", "action", ...
5529b950df33c0eaf0c24a9a307cf00fe627d0ad
https://github.com/flyte/upnpclient/blob/5529b950df33c0eaf0c24a9a307cf00fe627d0ad/upnpclient/upnp.py#L187-L197
train
30,867
flyte/upnpclient
upnpclient/upnp.py
Service.subscribe
def subscribe(self, callback_url, timeout=None): """ Set up a subscription to the events offered by this service. """ url = urljoin(self._url_base, self._event_sub_url) headers = dict( HOST=urlparse(url).netloc, CALLBACK='<%s>' % callback_url, NT='upnp:event' ) if timeout is not None: headers['TIMEOUT'] = 'Second-%s' % timeout resp = requests.request('SUBSCRIBE', url, headers=headers, auth=self.device.http_auth) resp.raise_for_status() return Service.validate_subscription_response(resp)
python
def subscribe(self, callback_url, timeout=None): """ Set up a subscription to the events offered by this service. """ url = urljoin(self._url_base, self._event_sub_url) headers = dict( HOST=urlparse(url).netloc, CALLBACK='<%s>' % callback_url, NT='upnp:event' ) if timeout is not None: headers['TIMEOUT'] = 'Second-%s' % timeout resp = requests.request('SUBSCRIBE', url, headers=headers, auth=self.device.http_auth) resp.raise_for_status() return Service.validate_subscription_response(resp)
[ "def", "subscribe", "(", "self", ",", "callback_url", ",", "timeout", "=", "None", ")", ":", "url", "=", "urljoin", "(", "self", ".", "_url_base", ",", "self", ".", "_event_sub_url", ")", "headers", "=", "dict", "(", "HOST", "=", "urlparse", "(", "url"...
Set up a subscription to the events offered by this service.
[ "Set", "up", "a", "subscription", "to", "the", "events", "offered", "by", "this", "service", "." ]
5529b950df33c0eaf0c24a9a307cf00fe627d0ad
https://github.com/flyte/upnpclient/blob/5529b950df33c0eaf0c24a9a307cf00fe627d0ad/upnpclient/upnp.py#L357-L371
train
30,868
flyte/upnpclient
upnpclient/upnp.py
Service.renew_subscription
def renew_subscription(self, sid, timeout=None): """ Renews a previously configured subscription. """ url = urljoin(self._url_base, self._event_sub_url) headers = dict( HOST=urlparse(url).netloc, SID=sid ) if timeout is not None: headers['TIMEOUT'] = 'Second-%s' % timeout resp = requests.request('SUBSCRIBE', url, headers=headers, auth=self.device.http_auth) resp.raise_for_status() return Service.validate_subscription_renewal_response(resp)
python
def renew_subscription(self, sid, timeout=None): """ Renews a previously configured subscription. """ url = urljoin(self._url_base, self._event_sub_url) headers = dict( HOST=urlparse(url).netloc, SID=sid ) if timeout is not None: headers['TIMEOUT'] = 'Second-%s' % timeout resp = requests.request('SUBSCRIBE', url, headers=headers, auth=self.device.http_auth) resp.raise_for_status() return Service.validate_subscription_renewal_response(resp)
[ "def", "renew_subscription", "(", "self", ",", "sid", ",", "timeout", "=", "None", ")", ":", "url", "=", "urljoin", "(", "self", ".", "_url_base", ",", "self", ".", "_event_sub_url", ")", "headers", "=", "dict", "(", "HOST", "=", "urlparse", "(", "url"...
Renews a previously configured subscription.
[ "Renews", "a", "previously", "configured", "subscription", "." ]
5529b950df33c0eaf0c24a9a307cf00fe627d0ad
https://github.com/flyte/upnpclient/blob/5529b950df33c0eaf0c24a9a307cf00fe627d0ad/upnpclient/upnp.py#L373-L386
train
30,869
flyte/upnpclient
upnpclient/upnp.py
Service.cancel_subscription
def cancel_subscription(self, sid): """ Unsubscribes from a previously configured subscription. """ url = urljoin(self._url_base, self._event_sub_url) headers = dict( HOST=urlparse(url).netloc, SID=sid ) resp = requests.request('UNSUBSCRIBE', url, headers=headers, auth=self.device.http_auth) resp.raise_for_status()
python
def cancel_subscription(self, sid): """ Unsubscribes from a previously configured subscription. """ url = urljoin(self._url_base, self._event_sub_url) headers = dict( HOST=urlparse(url).netloc, SID=sid ) resp = requests.request('UNSUBSCRIBE', url, headers=headers, auth=self.device.http_auth) resp.raise_for_status()
[ "def", "cancel_subscription", "(", "self", ",", "sid", ")", ":", "url", "=", "urljoin", "(", "self", ".", "_url_base", ",", "self", ".", "_event_sub_url", ")", "headers", "=", "dict", "(", "HOST", "=", "urlparse", "(", "url", ")", ".", "netloc", ",", ...
Unsubscribes from a previously configured subscription.
[ "Unsubscribes", "from", "a", "previously", "configured", "subscription", "." ]
5529b950df33c0eaf0c24a9a307cf00fe627d0ad
https://github.com/flyte/upnpclient/blob/5529b950df33c0eaf0c24a9a307cf00fe627d0ad/upnpclient/upnp.py#L388-L398
train
30,870
flyte/upnpclient
upnpclient/ssdp.py
discover
def discover(timeout=5): """ Convenience method to discover UPnP devices on the network. Returns a list of `upnp.Device` instances. Any invalid servers are silently ignored. """ devices = {} for entry in scan(timeout): if entry.location in devices: continue try: devices[entry.location] = Device(entry.location) except Exception as exc: log = _getLogger("ssdp") log.error('Error \'%s\' for %s', exc, entry.location) return list(devices.values())
python
def discover(timeout=5): """ Convenience method to discover UPnP devices on the network. Returns a list of `upnp.Device` instances. Any invalid servers are silently ignored. """ devices = {} for entry in scan(timeout): if entry.location in devices: continue try: devices[entry.location] = Device(entry.location) except Exception as exc: log = _getLogger("ssdp") log.error('Error \'%s\' for %s', exc, entry.location) return list(devices.values())
[ "def", "discover", "(", "timeout", "=", "5", ")", ":", "devices", "=", "{", "}", "for", "entry", "in", "scan", "(", "timeout", ")", ":", "if", "entry", ".", "location", "in", "devices", ":", "continue", "try", ":", "devices", "[", "entry", ".", "lo...
Convenience method to discover UPnP devices on the network. Returns a list of `upnp.Device` instances. Any invalid servers are silently ignored.
[ "Convenience", "method", "to", "discover", "UPnP", "devices", "on", "the", "network", ".", "Returns", "a", "list", "of", "upnp", ".", "Device", "instances", ".", "Any", "invalid", "servers", "are", "silently", "ignored", "." ]
5529b950df33c0eaf0c24a9a307cf00fe627d0ad
https://github.com/flyte/upnpclient/blob/5529b950df33c0eaf0c24a9a307cf00fe627d0ad/upnpclient/ssdp.py#L7-L22
train
30,871
MarcoFavorito/pythomata
pythomata/dfa.py
DFA.to_dot
def to_dot(self, path: str, title: Optional[str] = None): """ Print the automaton to a dot file :param path: the path where to save the file. :param title: :return: """ g = graphviz.Digraph(format='svg') g.node('fake', style='invisible') for state in self._states: if state == self._initial_state: if state in self._accepting_states: g.node(str(state), root='true', shape='doublecircle') else: g.node(str(state), root='true') elif state in self._accepting_states: g.node(str(state), shape='doublecircle') else: g.node(str(state)) g.edge('fake', str(self._initial_state), style='bold') for start in self._transition_function: for symbol, end in self._transition_function[start].items(): g.edge(str(start), str(end), label=str(symbol)) if title: g.attr(label=title) g.attr(fontsize='20') g.render(filename=path) return
python
def to_dot(self, path: str, title: Optional[str] = None): """ Print the automaton to a dot file :param path: the path where to save the file. :param title: :return: """ g = graphviz.Digraph(format='svg') g.node('fake', style='invisible') for state in self._states: if state == self._initial_state: if state in self._accepting_states: g.node(str(state), root='true', shape='doublecircle') else: g.node(str(state), root='true') elif state in self._accepting_states: g.node(str(state), shape='doublecircle') else: g.node(str(state)) g.edge('fake', str(self._initial_state), style='bold') for start in self._transition_function: for symbol, end in self._transition_function[start].items(): g.edge(str(start), str(end), label=str(symbol)) if title: g.attr(label=title) g.attr(fontsize='20') g.render(filename=path) return
[ "def", "to_dot", "(", "self", ",", "path", ":", "str", ",", "title", ":", "Optional", "[", "str", "]", "=", "None", ")", ":", "g", "=", "graphviz", ".", "Digraph", "(", "format", "=", "'svg'", ")", "g", ".", "node", "(", "'fake'", ",", "style", ...
Print the automaton to a dot file :param path: the path where to save the file. :param title: :return:
[ "Print", "the", "automaton", "to", "a", "dot", "file" ]
4739c620008b8fe50583eecb97f9bfd7f006b95c
https://github.com/MarcoFavorito/pythomata/blob/4739c620008b8fe50583eecb97f9bfd7f006b95c/pythomata/dfa.py#L248-L282
train
30,872
MarcoFavorito/pythomata
pythomata/dfa.py
DFA.levels_to_accepting_states
def levels_to_accepting_states(self) -> dict: """Return a dict from states to level, i.e. the number of steps to reach any accepting state. level = -1 if the state cannot reach any accepting state""" res = {accepting_state: 0 for accepting_state in self._accepting_states} level = 0 # least fixpoint z_current, z_next = set(), set() z_next = set(self._accepting_states) while z_current != z_next: level += 1 z_current = z_next z_next = copy(z_current) for state in self._transition_function: for action in self._transition_function[state]: if state in z_current: continue next_state = self._transition_function[state][action] if next_state in z_current: z_next.add(state) res[state] = level break z_current = z_next for failure_state in filter(lambda x: x not in z_current, self._states): res[failure_state] = -1 return res
python
def levels_to_accepting_states(self) -> dict: """Return a dict from states to level, i.e. the number of steps to reach any accepting state. level = -1 if the state cannot reach any accepting state""" res = {accepting_state: 0 for accepting_state in self._accepting_states} level = 0 # least fixpoint z_current, z_next = set(), set() z_next = set(self._accepting_states) while z_current != z_next: level += 1 z_current = z_next z_next = copy(z_current) for state in self._transition_function: for action in self._transition_function[state]: if state in z_current: continue next_state = self._transition_function[state][action] if next_state in z_current: z_next.add(state) res[state] = level break z_current = z_next for failure_state in filter(lambda x: x not in z_current, self._states): res[failure_state] = -1 return res
[ "def", "levels_to_accepting_states", "(", "self", ")", "->", "dict", ":", "res", "=", "{", "accepting_state", ":", "0", "for", "accepting_state", "in", "self", ".", "_accepting_states", "}", "level", "=", "0", "# least fixpoint", "z_current", ",", "z_next", "=...
Return a dict from states to level, i.e. the number of steps to reach any accepting state. level = -1 if the state cannot reach any accepting state
[ "Return", "a", "dict", "from", "states", "to", "level", "i", ".", "e", ".", "the", "number", "of", "steps", "to", "reach", "any", "accepting", "state", ".", "level", "=", "-", "1", "if", "the", "state", "cannot", "reach", "any", "accepting", "state" ]
4739c620008b8fe50583eecb97f9bfd7f006b95c
https://github.com/MarcoFavorito/pythomata/blob/4739c620008b8fe50583eecb97f9bfd7f006b95c/pythomata/dfa.py#L284-L313
train
30,873
MarcoFavorito/pythomata
pythomata/nfa.py
NFA.determinize
def determinize(self) -> DFA: """Determinize the NFA :return: the DFA equivalent to the DFA. """ nfa = self new_states = {macro_state for macro_state in powerset(nfa._states)} initial_state = frozenset([nfa._initial_state]) final_states = {q for q in new_states if len(q.intersection(nfa._accepting_states)) != 0} transition_function = {} for state_set in new_states: for action in nfa._alphabet: next_macrostate = set() for s in state_set: for next_state in nfa._transition_function.get(s, {}).get(action, set()): next_macrostate.add(next_state) next_macrostate = frozenset(next_macrostate) transition_function.setdefault(state_set, {})[action] = next_macrostate return DFA(new_states, set(nfa._alphabet), initial_state, set(final_states), transition_function)
python
def determinize(self) -> DFA: """Determinize the NFA :return: the DFA equivalent to the DFA. """ nfa = self new_states = {macro_state for macro_state in powerset(nfa._states)} initial_state = frozenset([nfa._initial_state]) final_states = {q for q in new_states if len(q.intersection(nfa._accepting_states)) != 0} transition_function = {} for state_set in new_states: for action in nfa._alphabet: next_macrostate = set() for s in state_set: for next_state in nfa._transition_function.get(s, {}).get(action, set()): next_macrostate.add(next_state) next_macrostate = frozenset(next_macrostate) transition_function.setdefault(state_set, {})[action] = next_macrostate return DFA(new_states, set(nfa._alphabet), initial_state, set(final_states), transition_function)
[ "def", "determinize", "(", "self", ")", "->", "DFA", ":", "nfa", "=", "self", "new_states", "=", "{", "macro_state", "for", "macro_state", "in", "powerset", "(", "nfa", ".", "_states", ")", "}", "initial_state", "=", "frozenset", "(", "[", "nfa", ".", ...
Determinize the NFA :return: the DFA equivalent to the DFA.
[ "Determinize", "the", "NFA" ]
4739c620008b8fe50583eecb97f9bfd7f006b95c
https://github.com/MarcoFavorito/pythomata/blob/4739c620008b8fe50583eecb97f9bfd7f006b95c/pythomata/nfa.py#L100-L124
train
30,874
peercoin/peercoin_rpc
peercoin_rpc/peercoin_rpc.py
Client.req
def req(self, method, params=()): """send request to ppcoind""" response = self.session.post( self.url, data=json.dumps({"method": method, "params": params, "jsonrpc": "1.1"}), ).json() if response["error"] is not None: return response["error"] else: return response["result"]
python
def req(self, method, params=()): """send request to ppcoind""" response = self.session.post( self.url, data=json.dumps({"method": method, "params": params, "jsonrpc": "1.1"}), ).json() if response["error"] is not None: return response["error"] else: return response["result"]
[ "def", "req", "(", "self", ",", "method", ",", "params", "=", "(", ")", ")", ":", "response", "=", "self", ".", "session", ".", "post", "(", "self", ".", "url", ",", "data", "=", "json", ".", "dumps", "(", "{", "\"method\"", ":", "method", ",", ...
send request to ppcoind
[ "send", "request", "to", "ppcoind" ]
6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc
https://github.com/peercoin/peercoin_rpc/blob/6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc/peercoin_rpc/peercoin_rpc.py#L85-L96
train
30,875
peercoin/peercoin_rpc
peercoin_rpc/peercoin_rpc.py
Client.batch
def batch(self, reqs): """ send batch request using jsonrpc 2.0 """ batch_data = [] for req_id, req in enumerate(reqs): batch_data.append( {"method": req[0], "params": req[1], "jsonrpc": "2.0", "id": req_id} ) data = json.dumps(batch_data) response = self.session.post(self.url, data=data).json() return response
python
def batch(self, reqs): """ send batch request using jsonrpc 2.0 """ batch_data = [] for req_id, req in enumerate(reqs): batch_data.append( {"method": req[0], "params": req[1], "jsonrpc": "2.0", "id": req_id} ) data = json.dumps(batch_data) response = self.session.post(self.url, data=data).json() return response
[ "def", "batch", "(", "self", ",", "reqs", ")", ":", "batch_data", "=", "[", "]", "for", "req_id", ",", "req", "in", "enumerate", "(", "reqs", ")", ":", "batch_data", ".", "append", "(", "{", "\"method\"", ":", "req", "[", "0", "]", ",", "\"params\"...
send batch request using jsonrpc 2.0
[ "send", "batch", "request", "using", "jsonrpc", "2", ".", "0" ]
6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc
https://github.com/peercoin/peercoin_rpc/blob/6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc/peercoin_rpc/peercoin_rpc.py#L98-L110
train
30,876
peercoin/peercoin_rpc
peercoin_rpc/peercoin_rpc.py
Client.walletpassphrase
def walletpassphrase(self, passphrase, timeout=99999999, mint_only=True): """used to unlock wallet for minting""" return self.req("walletpassphrase", [passphrase, timeout, mint_only])
python
def walletpassphrase(self, passphrase, timeout=99999999, mint_only=True): """used to unlock wallet for minting""" return self.req("walletpassphrase", [passphrase, timeout, mint_only])
[ "def", "walletpassphrase", "(", "self", ",", "passphrase", ",", "timeout", "=", "99999999", ",", "mint_only", "=", "True", ")", ":", "return", "self", ".", "req", "(", "\"walletpassphrase\"", ",", "[", "passphrase", ",", "timeout", ",", "mint_only", "]", "...
used to unlock wallet for minting
[ "used", "to", "unlock", "wallet", "for", "minting" ]
6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc
https://github.com/peercoin/peercoin_rpc/blob/6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc/peercoin_rpc/peercoin_rpc.py#L119-L121
train
30,877
peercoin/peercoin_rpc
peercoin_rpc/peercoin_rpc.py
Client.getblock
def getblock(self, blockhash, decode=False): """returns detail block info.""" if not decode: decode = "false" return self.req("getblock", [blockhash, decode]) else: return self.req("getblock", [blockhash])
python
def getblock(self, blockhash, decode=False): """returns detail block info.""" if not decode: decode = "false" return self.req("getblock", [blockhash, decode]) else: return self.req("getblock", [blockhash])
[ "def", "getblock", "(", "self", ",", "blockhash", ",", "decode", "=", "False", ")", ":", "if", "not", "decode", ":", "decode", "=", "\"false\"", "return", "self", ".", "req", "(", "\"getblock\"", ",", "[", "blockhash", ",", "decode", "]", ")", "else", ...
returns detail block info.
[ "returns", "detail", "block", "info", "." ]
6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc
https://github.com/peercoin/peercoin_rpc/blob/6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc/peercoin_rpc/peercoin_rpc.py#L123-L132
train
30,878
peercoin/peercoin_rpc
peercoin_rpc/peercoin_rpc.py
Client.sendfrom
def sendfrom(self, account, address, amount): """send outgoing tx from specified account to a given address""" return self.req("sendfrom", [account, address, amount])
python
def sendfrom(self, account, address, amount): """send outgoing tx from specified account to a given address""" return self.req("sendfrom", [account, address, amount])
[ "def", "sendfrom", "(", "self", ",", "account", ",", "address", ",", "amount", ")", ":", "return", "self", ".", "req", "(", "\"sendfrom\"", ",", "[", "account", ",", "address", ",", "amount", "]", ")" ]
send outgoing tx from specified account to a given address
[ "send", "outgoing", "tx", "from", "specified", "account", "to", "a", "given", "address" ]
6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc
https://github.com/peercoin/peercoin_rpc/blob/6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc/peercoin_rpc/peercoin_rpc.py#L187-L189
train
30,879
peercoin/peercoin_rpc
peercoin_rpc/peercoin_rpc.py
Client.listtransactions
def listtransactions(self, account="", many=999, since=0): """list all transactions associated with this wallet""" return self.req("listtransactions", [account, many, since])
python
def listtransactions(self, account="", many=999, since=0): """list all transactions associated with this wallet""" return self.req("listtransactions", [account, many, since])
[ "def", "listtransactions", "(", "self", ",", "account", "=", "\"\"", ",", "many", "=", "999", ",", "since", "=", "0", ")", ":", "return", "self", ".", "req", "(", "\"listtransactions\"", ",", "[", "account", ",", "many", ",", "since", "]", ")" ]
list all transactions associated with this wallet
[ "list", "all", "transactions", "associated", "with", "this", "wallet" ]
6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc
https://github.com/peercoin/peercoin_rpc/blob/6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc/peercoin_rpc/peercoin_rpc.py#L211-L213
train
30,880
peercoin/peercoin_rpc
peercoin_rpc/peercoin_rpc.py
Client.verifymessage
def verifymessage(self, address, signature, message): """Verify a signed message.""" return self.req("verifymessage", [address, signature, message])
python
def verifymessage(self, address, signature, message): """Verify a signed message.""" return self.req("verifymessage", [address, signature, message])
[ "def", "verifymessage", "(", "self", ",", "address", ",", "signature", ",", "message", ")", ":", "return", "self", ".", "req", "(", "\"verifymessage\"", ",", "[", "address", ",", "signature", ",", "message", "]", ")" ]
Verify a signed message.
[ "Verify", "a", "signed", "message", "." ]
6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc
https://github.com/peercoin/peercoin_rpc/blob/6edd854c7fd607ad9f6f4d5eb8b8b7c7fd8c16cc/peercoin_rpc/peercoin_rpc.py#L277-L279
train
30,881
NLeSC/noodles
noodles/lib/coroutine.py
coroutine
def coroutine(f): """ A sink should be send `None` first, so that the coroutine arrives at the `yield` position. This wrapper takes care that this is done automatically when the coroutine is started. """ @wraps(f) def g(*args, **kwargs): sink = f(*args, **kwargs) sink.send(None) return sink return g
python
def coroutine(f): """ A sink should be send `None` first, so that the coroutine arrives at the `yield` position. This wrapper takes care that this is done automatically when the coroutine is started. """ @wraps(f) def g(*args, **kwargs): sink = f(*args, **kwargs) sink.send(None) return sink return g
[ "def", "coroutine", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "g", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "sink", "=", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "sink", ".", "send", "(", "None", ")", ...
A sink should be send `None` first, so that the coroutine arrives at the `yield` position. This wrapper takes care that this is done automatically when the coroutine is started.
[ "A", "sink", "should", "be", "send", "None", "first", "so", "that", "the", "coroutine", "arrives", "at", "the", "yield", "position", ".", "This", "wrapper", "takes", "care", "that", "this", "is", "done", "automatically", "when", "the", "coroutine", "is", "...
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/lib/coroutine.py#L4-L16
train
30,882
NLeSC/noodles
noodles/interface/maybe.py
Fail.add_call
def add_call(self, func): """Add a call to the trace.""" self.trace.append("{} ({}:{})".format( object_name(func), inspect.getsourcefile(func), inspect.getsourcelines(func)[1])) return self
python
def add_call(self, func): """Add a call to the trace.""" self.trace.append("{} ({}:{})".format( object_name(func), inspect.getsourcefile(func), inspect.getsourcelines(func)[1])) return self
[ "def", "add_call", "(", "self", ",", "func", ")", ":", "self", ".", "trace", ".", "append", "(", "\"{} ({}:{})\"", ".", "format", "(", "object_name", "(", "func", ")", ",", "inspect", ".", "getsourcefile", "(", "func", ")", ",", "inspect", ".", "getsou...
Add a call to the trace.
[ "Add", "a", "call", "to", "the", "trace", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/interface/maybe.py#L33-L40
train
30,883
NLeSC/noodles
noodles/patterns/functional_patterns.py
all
def all(pred: Callable, xs: Iterable): """ Check whether all the elements of the iterable `xs` fullfill predicate `pred`. :param pred: predicate function :param xs: iterable object. :returns: boolean """ for x in xs: if not pred(x): return False return True
python
def all(pred: Callable, xs: Iterable): """ Check whether all the elements of the iterable `xs` fullfill predicate `pred`. :param pred: predicate function :param xs: iterable object. :returns: boolean """ for x in xs: if not pred(x): return False return True
[ "def", "all", "(", "pred", ":", "Callable", ",", "xs", ":", "Iterable", ")", ":", "for", "x", "in", "xs", ":", "if", "not", "pred", "(", "x", ")", ":", "return", "False", "return", "True" ]
Check whether all the elements of the iterable `xs` fullfill predicate `pred`. :param pred: predicate function :param xs: iterable object. :returns: boolean
[ "Check", "whether", "all", "the", "elements", "of", "the", "iterable", "xs", "fullfill", "predicate", "pred", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/patterns/functional_patterns.py#L7-L22
train
30,884
NLeSC/noodles
noodles/patterns/functional_patterns.py
any
def any(pred: Callable, xs: Iterable): """ Check if at least one element of the iterable `xs` fullfills predicate `pred`. :param pred: predicate function. :param xs: iterable object. :returns: boolean """ b = find_first(pred, xs) return True if b is not None else False
python
def any(pred: Callable, xs: Iterable): """ Check if at least one element of the iterable `xs` fullfills predicate `pred`. :param pred: predicate function. :param xs: iterable object. :returns: boolean """ b = find_first(pred, xs) return True if b is not None else False
[ "def", "any", "(", "pred", ":", "Callable", ",", "xs", ":", "Iterable", ")", ":", "b", "=", "find_first", "(", "pred", ",", "xs", ")", "return", "True", "if", "b", "is", "not", "None", "else", "False" ]
Check if at least one element of the iterable `xs` fullfills predicate `pred`. :param pred: predicate function. :param xs: iterable object. :returns: boolean
[ "Check", "if", "at", "least", "one", "element", "of", "the", "iterable", "xs", "fullfills", "predicate", "pred", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/patterns/functional_patterns.py#L26-L39
train
30,885
NLeSC/noodles
noodles/patterns/functional_patterns.py
map
def map(fun: Callable, xs: Iterable): """ Traverse an iterable object applying function `fun` to each element and finally creats a workflow from it. :param fun: function to call in each element of the iterable object. :param xs: Iterable object. returns::py:class:`PromisedObject` """ generator = (fun(x) for x in xs) return gather(*generator)
python
def map(fun: Callable, xs: Iterable): """ Traverse an iterable object applying function `fun` to each element and finally creats a workflow from it. :param fun: function to call in each element of the iterable object. :param xs: Iterable object. returns::py:class:`PromisedObject` """ generator = (fun(x) for x in xs) return gather(*generator)
[ "def", "map", "(", "fun", ":", "Callable", ",", "xs", ":", "Iterable", ")", ":", "generator", "=", "(", "fun", "(", "x", ")", "for", "x", "in", "xs", ")", "return", "gather", "(", "*", "generator", ")" ]
Traverse an iterable object applying function `fun` to each element and finally creats a workflow from it. :param fun: function to call in each element of the iterable object. :param xs: Iterable object. returns::py:class:`PromisedObject`
[ "Traverse", "an", "iterable", "object", "applying", "function", "fun", "to", "each", "element", "and", "finally", "creats", "a", "workflow", "from", "it", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/patterns/functional_patterns.py#L82-L97
train
30,886
NLeSC/noodles
noodles/patterns/functional_patterns.py
zip_with
def zip_with(fun: Callable, xs: Iterable, ys: Iterable): """ Fuse two Iterable object using the function `fun`. Notice that if the two objects have different len, the shortest object gives the result's shape. :param fun: function taking two argument use to process element x from `xs` and y from `ys`. :param xs: first iterable. :param ys: second iterable. returns::py:class:`PromisedObject` """ generator = (fun(*rs) for rs in zip(xs, ys)) return gather(*generator)
python
def zip_with(fun: Callable, xs: Iterable, ys: Iterable): """ Fuse two Iterable object using the function `fun`. Notice that if the two objects have different len, the shortest object gives the result's shape. :param fun: function taking two argument use to process element x from `xs` and y from `ys`. :param xs: first iterable. :param ys: second iterable. returns::py:class:`PromisedObject` """ generator = (fun(*rs) for rs in zip(xs, ys)) return gather(*generator)
[ "def", "zip_with", "(", "fun", ":", "Callable", ",", "xs", ":", "Iterable", ",", "ys", ":", "Iterable", ")", ":", "generator", "=", "(", "fun", "(", "*", "rs", ")", "for", "rs", "in", "zip", "(", "xs", ",", "ys", ")", ")", "return", "gather", "...
Fuse two Iterable object using the function `fun`. Notice that if the two objects have different len, the shortest object gives the result's shape. :param fun: function taking two argument use to process element x from `xs` and y from `ys`. :param xs: first iterable. :param ys: second iterable. returns::py:class:`PromisedObject`
[ "Fuse", "two", "Iterable", "object", "using", "the", "function", "fun", ".", "Notice", "that", "if", "the", "two", "objects", "have", "different", "len", "the", "shortest", "object", "gives", "the", "result", "s", "shape", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/patterns/functional_patterns.py#L101-L121
train
30,887
NLeSC/noodles
noodles/workflow/model.py
_arg_to_str
def _arg_to_str(arg): """Convert argument to a string.""" if isinstance(arg, str): return _sugar(repr(arg)) elif arg is Empty: return '\u2014' else: return _sugar(str(arg))
python
def _arg_to_str(arg): """Convert argument to a string.""" if isinstance(arg, str): return _sugar(repr(arg)) elif arg is Empty: return '\u2014' else: return _sugar(str(arg))
[ "def", "_arg_to_str", "(", "arg", ")", ":", "if", "isinstance", "(", "arg", ",", "str", ")", ":", "return", "_sugar", "(", "repr", "(", "arg", ")", ")", "elif", "arg", "is", "Empty", ":", "return", "'\\u2014'", "else", ":", "return", "_sugar", "(", ...
Convert argument to a string.
[ "Convert", "argument", "to", "a", "string", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/workflow/model.py#L19-L26
train
30,888
NLeSC/noodles
noodles/workflow/model.py
is_node_ready
def is_node_ready(node): """Returns True if none of the argument holders contain any `Empty` object. """ return all(ref_argument(node.bound_args, a) is not Empty for a in serialize_arguments(node.bound_args))
python
def is_node_ready(node): """Returns True if none of the argument holders contain any `Empty` object. """ return all(ref_argument(node.bound_args, a) is not Empty for a in serialize_arguments(node.bound_args))
[ "def", "is_node_ready", "(", "node", ")", ":", "return", "all", "(", "ref_argument", "(", "node", ".", "bound_args", ",", "a", ")", "is", "not", "Empty", "for", "a", "in", "serialize_arguments", "(", "node", ".", "bound_args", ")", ")" ]
Returns True if none of the argument holders contain any `Empty` object.
[ "Returns", "True", "if", "none", "of", "the", "argument", "holders", "contain", "any", "Empty", "object", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/workflow/model.py#L137-L141
train
30,889
NLeSC/noodles
noodles/prov/sqlite.py
JobDB.add_job_to_db
def add_job_to_db(self, key, job): """Add job info to the database.""" job_msg = self.registry.deep_encode(job) prov = prov_key(job_msg) def set_link(duplicate_id): self.cur.execute( 'update "jobs" set "link" = ?, "status" = ? where "id" = ?', (duplicate_id, Status.DUPLICATE, key)) with self.lock: self.cur.execute( 'select * from "jobs" where "prov" = ? ' 'and (("result" is not null) or ' '("session" = ? and "link" is null))', (prov, self.session)) rec = self.cur.fetchone() rec = JobEntry(*rec) if rec is not None else None self.cur.execute( 'update "jobs" set "prov" = ?, "version" = ?, "function" = ?, ' '"arguments" = ?, "status" = ? where "id" = ?', (prov, job_msg['data']['hints'].get('version'), json.dumps(job_msg['data']['function']), json.dumps(job_msg['data']['arguments']), Status.WAITING, key)) if not rec: # no duplicate found, go on return 'initialized', None set_link(rec.id) if rec.result is not None and rec.status == Status.WORKFLOW: # the found duplicate returned a workflow if rec.link is not None: # link is set, so result is fully realized self.cur.execute( 'select * from "jobs" where "id" = ?', (rec.link,)) rec = self.cur.fetchone() assert rec is not None, "database integrity violation" rec = JobEntry(*rec) else: # link is not set, the result is still waited upon assert rec.session == self.session, \ "database integrity violation" self.attached[rec.id].append(key) return 'attached', None if rec.result is not None: # result is found! return it result_value = self.registry.from_json(rec.result, deref=True) result = ResultMessage( key, 'retrieved', result_value, None) return 'retrieved', result if rec.session == self.session: # still waiting for result, attach self.attached[rec.id].append(key) return 'attached', None
python
def add_job_to_db(self, key, job): """Add job info to the database.""" job_msg = self.registry.deep_encode(job) prov = prov_key(job_msg) def set_link(duplicate_id): self.cur.execute( 'update "jobs" set "link" = ?, "status" = ? where "id" = ?', (duplicate_id, Status.DUPLICATE, key)) with self.lock: self.cur.execute( 'select * from "jobs" where "prov" = ? ' 'and (("result" is not null) or ' '("session" = ? and "link" is null))', (prov, self.session)) rec = self.cur.fetchone() rec = JobEntry(*rec) if rec is not None else None self.cur.execute( 'update "jobs" set "prov" = ?, "version" = ?, "function" = ?, ' '"arguments" = ?, "status" = ? where "id" = ?', (prov, job_msg['data']['hints'].get('version'), json.dumps(job_msg['data']['function']), json.dumps(job_msg['data']['arguments']), Status.WAITING, key)) if not rec: # no duplicate found, go on return 'initialized', None set_link(rec.id) if rec.result is not None and rec.status == Status.WORKFLOW: # the found duplicate returned a workflow if rec.link is not None: # link is set, so result is fully realized self.cur.execute( 'select * from "jobs" where "id" = ?', (rec.link,)) rec = self.cur.fetchone() assert rec is not None, "database integrity violation" rec = JobEntry(*rec) else: # link is not set, the result is still waited upon assert rec.session == self.session, \ "database integrity violation" self.attached[rec.id].append(key) return 'attached', None if rec.result is not None: # result is found! return it result_value = self.registry.from_json(rec.result, deref=True) result = ResultMessage( key, 'retrieved', result_value, None) return 'retrieved', result if rec.session == self.session: # still waiting for result, attach self.attached[rec.id].append(key) return 'attached', None
[ "def", "add_job_to_db", "(", "self", ",", "key", ",", "job", ")", ":", "job_msg", "=", "self", ".", "registry", ".", "deep_encode", "(", "job", ")", "prov", "=", "prov_key", "(", "job_msg", ")", "def", "set_link", "(", "duplicate_id", ")", ":", "self",...
Add job info to the database.
[ "Add", "job", "info", "to", "the", "database", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/prov/sqlite.py#L245-L307
train
30,890
NLeSC/noodles
noodles/prov/sqlite.py
JobDB.job_exists
def job_exists(self, prov): """Check if a job exists in the database.""" with self.lock: self.cur.execute('select * from "jobs" where "prov" = ?;', (prov,)) rec = self.cur.fetchone() return rec is not None
python
def job_exists(self, prov): """Check if a job exists in the database.""" with self.lock: self.cur.execute('select * from "jobs" where "prov" = ?;', (prov,)) rec = self.cur.fetchone() return rec is not None
[ "def", "job_exists", "(", "self", ",", "prov", ")", ":", "with", "self", ".", "lock", ":", "self", ".", "cur", ".", "execute", "(", "'select * from \"jobs\" where \"prov\" = ?;'", ",", "(", "prov", ",", ")", ")", "rec", "=", "self", ".", "cur", ".", "f...
Check if a job exists in the database.
[ "Check", "if", "a", "job", "exists", "in", "the", "database", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/prov/sqlite.py#L309-L314
train
30,891
NLeSC/noodles
noodles/prov/sqlite.py
JobDB.store_result_in_db
def store_result_in_db(self, result, always_cache=True): """Store a result in the database.""" job = self[result.key] def extend_dependent_links(): with self.lock: new_workflow_id = id(get_workflow(result.value)) self.links[new_workflow_id].extend( self.links[id(job.workflow)]) del self.links[id(job.workflow)] def store_result(status): result_value_msg = self.registry.to_json(result.value) with self.lock: self.cur.execute( 'update "jobs" set "result" = ?, ' '"status" = ? where "id" = ?;', (result_value_msg, status, result.key)) def acquire_links(): with self.lock: linked_keys = tuple(self.links[id(job.workflow)]) del self.links[id(job.workflow)] # update links for jobs up in the call-stack (parent workflows) n_questions = ','.join('?' * len(linked_keys)) self.cur.execute( 'update "jobs" set "link" = ? where "id" in ({});' .format(n_questions), (result.key,) + linked_keys) # jobs that were attached to the parent workflow(s) will not # receive the current result automatically, so we need to force # feed them to the scheduler. attached_keys = () for k in linked_keys: attached_keys += tuple(self.attached[k]) del self.attached[k] return attached_keys # if the returned job is not set to be stored, but a parent job # is, we still need to store the result. 
if 'store' not in job.hints and not always_cache: if job.is_root_node and id(job.workflow) in self.links: if is_workflow(result.value): extend_dependent_links() return () else: store_result(Status.LINKEE) return acquire_links() else: return () # if the return value is a workflow, store the workflow, and add # links to this job, to be updated when the resolved result comes in if is_workflow(result.value): store_result(Status.WORKFLOW) with self.lock: self.links[id(get_workflow(result.value))].append(result.key) if job.is_root_node: extend_dependent_links() return () store_result(Status.STORED) with self.lock: attached_keys = tuple(self.attached[result.key]) del self.attached[result.key] if job.is_root_node: attached_keys += acquire_links() return attached_keys
python
def store_result_in_db(self, result, always_cache=True): """Store a result in the database.""" job = self[result.key] def extend_dependent_links(): with self.lock: new_workflow_id = id(get_workflow(result.value)) self.links[new_workflow_id].extend( self.links[id(job.workflow)]) del self.links[id(job.workflow)] def store_result(status): result_value_msg = self.registry.to_json(result.value) with self.lock: self.cur.execute( 'update "jobs" set "result" = ?, ' '"status" = ? where "id" = ?;', (result_value_msg, status, result.key)) def acquire_links(): with self.lock: linked_keys = tuple(self.links[id(job.workflow)]) del self.links[id(job.workflow)] # update links for jobs up in the call-stack (parent workflows) n_questions = ','.join('?' * len(linked_keys)) self.cur.execute( 'update "jobs" set "link" = ? where "id" in ({});' .format(n_questions), (result.key,) + linked_keys) # jobs that were attached to the parent workflow(s) will not # receive the current result automatically, so we need to force # feed them to the scheduler. attached_keys = () for k in linked_keys: attached_keys += tuple(self.attached[k]) del self.attached[k] return attached_keys # if the returned job is not set to be stored, but a parent job # is, we still need to store the result. 
if 'store' not in job.hints and not always_cache: if job.is_root_node and id(job.workflow) in self.links: if is_workflow(result.value): extend_dependent_links() return () else: store_result(Status.LINKEE) return acquire_links() else: return () # if the return value is a workflow, store the workflow, and add # links to this job, to be updated when the resolved result comes in if is_workflow(result.value): store_result(Status.WORKFLOW) with self.lock: self.links[id(get_workflow(result.value))].append(result.key) if job.is_root_node: extend_dependent_links() return () store_result(Status.STORED) with self.lock: attached_keys = tuple(self.attached[result.key]) del self.attached[result.key] if job.is_root_node: attached_keys += acquire_links() return attached_keys
[ "def", "store_result_in_db", "(", "self", ",", "result", ",", "always_cache", "=", "True", ")", ":", "job", "=", "self", "[", "result", ".", "key", "]", "def", "extend_dependent_links", "(", ")", ":", "with", "self", ".", "lock", ":", "new_workflow_id", ...
Store a result in the database.
[ "Store", "a", "result", "in", "the", "database", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/prov/sqlite.py#L316-L388
train
30,892
NLeSC/noodles
noodles/prov/sqlite.py
JobDB.add_time_stamp
def add_time_stamp(self, db_id, name): """Add a timestamp to the database.""" with self.lock: self.cur.execute( 'insert into "timestamps" ("job", "what")' 'values (?, ?);', (db_id, name))
python
def add_time_stamp(self, db_id, name): """Add a timestamp to the database.""" with self.lock: self.cur.execute( 'insert into "timestamps" ("job", "what")' 'values (?, ?);', (db_id, name))
[ "def", "add_time_stamp", "(", "self", ",", "db_id", ",", "name", ")", ":", "with", "self", ".", "lock", ":", "self", ".", "cur", ".", "execute", "(", "'insert into \"timestamps\" (\"job\", \"what\")'", "'values (?, ?);'", ",", "(", "db_id", ",", "name", ")", ...
Add a timestamp to the database.
[ "Add", "a", "timestamp", "to", "the", "database", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/prov/sqlite.py#L390-L395
train
30,893
NLeSC/noodles
examples/static_sum.py
static_sum
def static_sum(values, limit_n=1000): """Example of static sum routine.""" if len(values) < limit_n: return sum(values) else: half = len(values) // 2 return add( static_sum(values[:half], limit_n), static_sum(values[half:], limit_n))
python
def static_sum(values, limit_n=1000): """Example of static sum routine.""" if len(values) < limit_n: return sum(values) else: half = len(values) // 2 return add( static_sum(values[:half], limit_n), static_sum(values[half:], limit_n))
[ "def", "static_sum", "(", "values", ",", "limit_n", "=", "1000", ")", ":", "if", "len", "(", "values", ")", "<", "limit_n", ":", "return", "sum", "(", "values", ")", "else", ":", "half", "=", "len", "(", "values", ")", "//", "2", "return", "add", ...
Example of static sum routine.
[ "Example", "of", "static", "sum", "routine", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/examples/static_sum.py#L7-L16
train
30,894
NLeSC/noodles
examples/static_sum.py
dynamic_sum
def dynamic_sum(values, limit_n=1000, acc=0, depth=4): """Example of dynamic sum.""" if len(values) < limit_n: return acc + sum(values) if depth > 0: half = len(values) // 2 return add( dynamic_sum(values[:half], limit_n, acc, depth=depth-1), dynamic_sum(values[half:], limit_n, 0, depth=depth-1)) return dynamic_sum(values[limit_n:], limit_n, acc + sum(values[:limit_n]), depth)
python
def dynamic_sum(values, limit_n=1000, acc=0, depth=4): """Example of dynamic sum.""" if len(values) < limit_n: return acc + sum(values) if depth > 0: half = len(values) // 2 return add( dynamic_sum(values[:half], limit_n, acc, depth=depth-1), dynamic_sum(values[half:], limit_n, 0, depth=depth-1)) return dynamic_sum(values[limit_n:], limit_n, acc + sum(values[:limit_n]), depth)
[ "def", "dynamic_sum", "(", "values", ",", "limit_n", "=", "1000", ",", "acc", "=", "0", ",", "depth", "=", "4", ")", ":", "if", "len", "(", "values", ")", "<", "limit_n", ":", "return", "acc", "+", "sum", "(", "values", ")", "if", "depth", ">", ...
Example of dynamic sum.
[ "Example", "of", "dynamic", "sum", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/examples/static_sum.py#L20-L32
train
30,895
NLeSC/noodles
noodles/lib/queue.py
Queue.flush
def flush(self): """Erases queue and set `end-of-queue` message.""" while not self._queue.empty(): self._queue.get() self._queue.task_done() self.close()
python
def flush(self): """Erases queue and set `end-of-queue` message.""" while not self._queue.empty(): self._queue.get() self._queue.task_done() self.close()
[ "def", "flush", "(", "self", ")", ":", "while", "not", "self", ".", "_queue", ".", "empty", "(", ")", ":", "self", ".", "_queue", ".", "get", "(", ")", "self", ".", "_queue", ".", "task_done", "(", ")", "self", ".", "close", "(", ")" ]
Erases queue and set `end-of-queue` message.
[ "Erases", "queue", "and", "set", "end", "-", "of", "-", "queue", "message", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/lib/queue.py#L71-L76
train
30,896
NLeSC/noodles
noodles/workflow/graphs.py
find_links_to
def find_links_to(links, node): """Find links to a node. :param links: forward links of a workflow :type links: Mapping[NodeId, Set[(NodeId, ArgumentType, [int|str]])] :param node: index to a node :type node: int :returns: dictionary of sources for each argument :rtype: Mapping[(ArgumentType, [int|str]), NodeId] """ return {address: src for src, (tgt, address) in _all_valid(links) if tgt == node}
python
def find_links_to(links, node): """Find links to a node. :param links: forward links of a workflow :type links: Mapping[NodeId, Set[(NodeId, ArgumentType, [int|str]])] :param node: index to a node :type node: int :returns: dictionary of sources for each argument :rtype: Mapping[(ArgumentType, [int|str]), NodeId] """ return {address: src for src, (tgt, address) in _all_valid(links) if tgt == node}
[ "def", "find_links_to", "(", "links", ",", "node", ")", ":", "return", "{", "address", ":", "src", "for", "src", ",", "(", "tgt", ",", "address", ")", "in", "_all_valid", "(", "links", ")", "if", "tgt", "==", "node", "}" ]
Find links to a node. :param links: forward links of a workflow :type links: Mapping[NodeId, Set[(NodeId, ArgumentType, [int|str]])] :param node: index to a node :type node: int :returns: dictionary of sources for each argument :rtype: Mapping[(ArgumentType, [int|str]), NodeId]
[ "Find", "links", "to", "a", "node", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/workflow/graphs.py#L1-L18
train
30,897
NLeSC/noodles
noodles/workflow/graphs.py
_all_valid
def _all_valid(links): """Iterates over all links, forgetting emtpy registers.""" for k, v in links.items(): for i in v: yield k, i
python
def _all_valid(links): """Iterates over all links, forgetting emtpy registers.""" for k, v in links.items(): for i in v: yield k, i
[ "def", "_all_valid", "(", "links", ")", ":", "for", "k", ",", "v", "in", "links", ".", "items", "(", ")", ":", "for", "i", "in", "v", ":", "yield", "k", ",", "i" ]
Iterates over all links, forgetting emtpy registers.
[ "Iterates", "over", "all", "links", "forgetting", "emtpy", "registers", "." ]
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/workflow/graphs.py#L21-L25
train
30,898
NLeSC/noodles
noodles/workflow/create.py
from_call
def from_call(foo, args, kwargs, hints, call_by_value=True): """Takes a function and a set of arguments it needs to run on. Returns a newly constructed workflow representing the promised value from the evaluation of the function with said arguments. These arguments are stored in a BoundArguments object matching to the signature of the given function ``foo``. That is, bound_args was constructed by doing:: inspect.signature(foo).bind(*args, **kwargs) The arguments stored in the ``bound_args`` object are filtered on being either plain, or promised. If an argument is promised, the value it represents is not actually available and needs to be computed by evaluating a workflow. If an argument is a promised value, the workflow representing the value is added to the new workflow. First all the nodes in the original workflow, if not already present in the new workflow from an earlier argument, are copied to the new workflow, and a new entry is made into the link dictionary. Then the links in the old workflow are also added to the link dictionary. Since the link dictionary points from nodes to a :py:class:`set` of :py:class:`ArgumentAddress` es, no links are duplicated. In the ``bound_args`` object the promised value is replaced by the ``Empty`` object, so that we can see which arguments still have to be evaluated. Doing this for all promised value arguments in the bound_args object, results in a new workflow with all the correct dependencies represented as links in the graph. :param foo: Function (or object) being called. :type foo: Callable :param args: Normal arguments to call :param kwargs: Keyword arguments to call :param hints: Hints that can be passed to the scheduler on where or how to schedule this job. :returns: New workflow. 
:rtype: Workflow """ # create the bound_args object bound_args = signature(foo).bind(*args, **kwargs) bound_args.apply_defaults() # get the name of the variadic argument if there is one variadic = next((x.name for x in bound_args.signature.parameters.values() if x.kind == Parameter.VAR_POSITIONAL), None) # *HACK* # the BoundArguments class uses a tuple to store the # variadic arguments. Since we need to modify them, # we have to replace the tuple with a list. This works, for now... if variadic: if variadic not in bound_args.arguments: bound_args.arguments[variadic] = [] else: bound_args.arguments[variadic] = \ list(bound_args.arguments[variadic]) # create the node and initialise hash key node = FunctionNode(foo, bound_args, hints) # setup the new workflow root = id(node) nodes = {root: node} links = {root: set()} # walk the arguments to the function call for address in serialize_arguments(node.bound_args): arg = ref_argument(node.bound_args, address) # the argument may still become a workflow if it # has the __deepcopy__ operator overloaded to return a workflow call_by_ref = 'call_by_ref' in hints and \ (hints['call_by_ref'] is True or address.name in hints['call_by_ref']) if not is_workflow(arg) and call_by_value and not call_by_ref: arg = deepcopy(arg) # if still not a workflow, we have a plain value! if not is_workflow(arg): set_argument(node.bound_args, address, arg) continue # merge the argument workflow into the new workflow workflow = get_workflow(arg) set_argument(node.bound_args, address, Empty) for n in workflow.nodes: if n not in nodes: nodes[n] = workflow.nodes[n] links[n] = set() links[n].update(workflow.links[n]) links[workflow.root].add((root, address)) return Workflow(root, nodes, links)
python
def from_call(foo, args, kwargs, hints, call_by_value=True): """Takes a function and a set of arguments it needs to run on. Returns a newly constructed workflow representing the promised value from the evaluation of the function with said arguments. These arguments are stored in a BoundArguments object matching to the signature of the given function ``foo``. That is, bound_args was constructed by doing:: inspect.signature(foo).bind(*args, **kwargs) The arguments stored in the ``bound_args`` object are filtered on being either plain, or promised. If an argument is promised, the value it represents is not actually available and needs to be computed by evaluating a workflow. If an argument is a promised value, the workflow representing the value is added to the new workflow. First all the nodes in the original workflow, if not already present in the new workflow from an earlier argument, are copied to the new workflow, and a new entry is made into the link dictionary. Then the links in the old workflow are also added to the link dictionary. Since the link dictionary points from nodes to a :py:class:`set` of :py:class:`ArgumentAddress` es, no links are duplicated. In the ``bound_args`` object the promised value is replaced by the ``Empty`` object, so that we can see which arguments still have to be evaluated. Doing this for all promised value arguments in the bound_args object, results in a new workflow with all the correct dependencies represented as links in the graph. :param foo: Function (or object) being called. :type foo: Callable :param args: Normal arguments to call :param kwargs: Keyword arguments to call :param hints: Hints that can be passed to the scheduler on where or how to schedule this job. :returns: New workflow. 
:rtype: Workflow """ # create the bound_args object bound_args = signature(foo).bind(*args, **kwargs) bound_args.apply_defaults() # get the name of the variadic argument if there is one variadic = next((x.name for x in bound_args.signature.parameters.values() if x.kind == Parameter.VAR_POSITIONAL), None) # *HACK* # the BoundArguments class uses a tuple to store the # variadic arguments. Since we need to modify them, # we have to replace the tuple with a list. This works, for now... if variadic: if variadic not in bound_args.arguments: bound_args.arguments[variadic] = [] else: bound_args.arguments[variadic] = \ list(bound_args.arguments[variadic]) # create the node and initialise hash key node = FunctionNode(foo, bound_args, hints) # setup the new workflow root = id(node) nodes = {root: node} links = {root: set()} # walk the arguments to the function call for address in serialize_arguments(node.bound_args): arg = ref_argument(node.bound_args, address) # the argument may still become a workflow if it # has the __deepcopy__ operator overloaded to return a workflow call_by_ref = 'call_by_ref' in hints and \ (hints['call_by_ref'] is True or address.name in hints['call_by_ref']) if not is_workflow(arg) and call_by_value and not call_by_ref: arg = deepcopy(arg) # if still not a workflow, we have a plain value! if not is_workflow(arg): set_argument(node.bound_args, address, arg) continue # merge the argument workflow into the new workflow workflow = get_workflow(arg) set_argument(node.bound_args, address, Empty) for n in workflow.nodes: if n not in nodes: nodes[n] = workflow.nodes[n] links[n] = set() links[n].update(workflow.links[n]) links[workflow.root].add((root, address)) return Workflow(root, nodes, links)
[ "def", "from_call", "(", "foo", ",", "args", ",", "kwargs", ",", "hints", ",", "call_by_value", "=", "True", ")", ":", "# create the bound_args object", "bound_args", "=", "signature", "(", "foo", ")", ".", "bind", "(", "*", "args", ",", "*", "*", "kwarg...
Takes a function and a set of arguments it needs to run on. Returns a newly constructed workflow representing the promised value from the evaluation of the function with said arguments. These arguments are stored in a BoundArguments object matching to the signature of the given function ``foo``. That is, bound_args was constructed by doing:: inspect.signature(foo).bind(*args, **kwargs) The arguments stored in the ``bound_args`` object are filtered on being either plain, or promised. If an argument is promised, the value it represents is not actually available and needs to be computed by evaluating a workflow. If an argument is a promised value, the workflow representing the value is added to the new workflow. First all the nodes in the original workflow, if not already present in the new workflow from an earlier argument, are copied to the new workflow, and a new entry is made into the link dictionary. Then the links in the old workflow are also added to the link dictionary. Since the link dictionary points from nodes to a :py:class:`set` of :py:class:`ArgumentAddress` es, no links are duplicated. In the ``bound_args`` object the promised value is replaced by the ``Empty`` object, so that we can see which arguments still have to be evaluated. Doing this for all promised value arguments in the bound_args object, results in a new workflow with all the correct dependencies represented as links in the graph. :param foo: Function (or object) being called. :type foo: Callable :param args: Normal arguments to call :param kwargs: Keyword arguments to call :param hints: Hints that can be passed to the scheduler on where or how to schedule this job. :returns: New workflow. :rtype: Workflow
[ "Takes", "a", "function", "and", "a", "set", "of", "arguments", "it", "needs", "to", "run", "on", ".", "Returns", "a", "newly", "constructed", "workflow", "representing", "the", "promised", "value", "from", "the", "evaluation", "of", "the", "function", "with...
3759e24e6e54a3a1a364431309dbb1061f617c04
https://github.com/NLeSC/noodles/blob/3759e24e6e54a3a1a364431309dbb1061f617c04/noodles/workflow/create.py#L10-L119
train
30,899