input
stringlengths
11
7.65k
target
stringlengths
22
8.26k
def __init__(self, params, parent):
    """Store the construction parameters and the parent reference.

    Fix: the original line ran three statements together with no
    separators, which is a SyntaxError in Python.
    """
    self.params = params
    self.parent = parent
def test_can_encode_relationship_with_properties(self): a = Node("Person", name="Alice") b = Node("Person", name="Bob") ab = KNOWS(a, b, since=1999) encoded = cypher_repr(ab, related_node_template="{property.name}") self.assertEqual("(Alice)-[:KNOWS {since: 1999}]->(Bob)", encode...
def __init__(self,params,parent): self.params=params self.parent=parent
def __getattr__(cls, name):
    """Stub out every missing attribute with a fresh MagicMock."""
    mock = MagicMock()
    return mock
def __init__(self,params,parent): self.params=params self.parent=parent
def combinationSum(self, candidates, target):
    """Return all unique combinations of candidates that sum to target."""
    candidates.sort()  # sorted order lets the DFS emit non-decreasing combos
    self.result = []
    self.dfs(candidates, target, 0, [])
    return self.result
def __init__(self,params,parent): self.params=params self.parent=parent
def dfs(self, candidates, target, start, reslist):
    """Record reslist as a solution when the remaining target hits zero.

    NOTE(review): `length` is computed but unused and no recursion is
    visible — the rest of this traversal may have been truncated upstream.
    """
    length = len(candidates)
    if target == 0:
        self.result.append(reslist)
        return
def __init__(self,params,parent): self.params=params self.parent=parent
def combinationSum(self, candidates, target):
    """Collect every combination of candidates summing to target via DFS."""
    self.result = []
    self.dfs(candidates, 0, target, [])
    return self.result
def __init__(self,params,parent): self.params=params self.parent=parent
def dfs(self, can, cursum, target, res):
    """Depth-first search over non-decreasing combinations of `can`.

    Appends `res` to self.result when its running sum reaches `target`,
    and prunes any branch whose sum already exceeds it.

    Fix: `xrange` is Python 2-only (NameError on Python 3); also use the
    idiomatic `res[-1]` instead of `res[len(res)-1]`.
    """
    if cursum > target:
        return
    if cursum == target:
        self.result.append(res)
        return
    for i in range(len(can)):
        # Only extend with elements >= the last chosen one to avoid
        # emitting permutations of the same combination.
        if not res or res[-1] <= can[i]:
            self.dfs(can, cursum + can[i], target, res + [can[i]])
def __init__(self,params,parent): self.params=params self.parent=parent
def gpio_init(pin, output):
    """Set the sysfs direction of GPIO `pin` ("out" when output else "in").

    Best-effort: any failure is reported to stdout rather than raised.
    """
    direction = b"out" if output else b"in"
    try:
        with open(f"/sys/class/gpio/gpio{pin}/direction", 'wb') as f:
            f.write(direction)
    except Exception as e:
        print(f"Failed to set gpio {pin} direction: {e}")
def __init__(self,params,parent): self.params=params self.parent=parent
def batch_norm(inputs, training, data_format, name=''): """Performs a batch normalization using a standard set of parameters.""" # We set fused=True for a significant performance boost. See # https://www.tensorflow.org/performance/performance_guide#common_fused_ops return tf.compat.v1.layers.batch_normalization...
def __init__(self,params,parent): self.params=params self.parent=parent
def step_impl(context, user_names_str): if not hasattr(context, 'users'): context.users = {} user_names = [name.strip() for name in re.split('and|,', user_names_str)] for user_name in user_names: token = 'fake_token_' + user_name user_id = context.helpers.create_test_user(user_name,...
def __init__(self,params,parent): self.params=params self.parent=parent
def fixed_padding(inputs, kernel_size, data_format): """Pads the input along the spatial dimensions independently of input size. Args: inputs: A tensor of size [batch, channels, height_in, width_in] or [batch, height_in, width_in, channels] depending on data_format. kernel_size: The kernel to be used...
def __init__(self,params,parent): self.params=params self.parent=parent
def step_impl(context, user_name):
    """Make the named user's auth token the active token for the scenario."""
    user = context.users[user_name]
    context.token = user['token']
def __init__(self,params,parent): self.params=params self.parent=parent
def conv2d_fixed_padding(inputs, filters, kernel_size, strides, data_format, name): """Strided 2-D convolution with explicit padding.""" # The padding is consistent and is based only on `kernel_size`, not on the # dimensions of `inputs` (as opposed to using `tf.layers.conv2d` alone). if...
def __init__(self,params,parent): self.params=params self.parent=parent
def step_impl(context, user_name, count):
    """Seed `count` postcards for the given user via the test helpers."""
    helpers = context.helpers
    helpers.load_postcards(user_name, count)
def __init__(self,params,parent): self.params=params self.parent=parent
def _building_block_v2(inputs, filters, training, projection_shortcut, strides, data_format, name): """A single block for ResNet v2, without a bottleneck. Batch normalization then ReLu then convolution as described by: Identity Mappings in Deep Residual Networks https://arxiv.org/pdf...
def __init__(self,params,parent): self.params=params self.parent=parent
def step_impl(context, rel_url):
    """Prepare (but do not send) a GET request for the given relative URL."""
    full_url = context.helpers.url(rel_url)
    context.request = LazyRequest('GET', full_url, context.token)
def __init__(self,params,parent): self.params=params self.parent=parent
def projection_shortcut(inputs, name): return conv2d_fixed_padding( inputs=inputs, filters=filters_out, kernel_size=1, strides=strides, data_format=data_format, name=name)
def __init__(self,params,parent): self.params=params self.parent=parent
def step_impl(context, rel_url):
    """Prepare (but do not send) a POST request for the given relative URL."""
    full_url = context.helpers.url(rel_url)
    context.request = LazyRequest('POST', full_url, context.token)
def __init__(self,params,parent): self.params=params self.parent=parent
def __init__(self, resnet_size, bottleneck, num_classes, num_filters, kernel_size, conv_stride, first_pool_size, first_pool_stride, block_sizes, block_strides, ...
def __init__(self,params,parent): self.params=params self.parent=parent
def step_impl(context, name, field):
    """Attach the named fixture file to the pending request under `field`."""
    path = context.helpers.file_path(name)
    context.request.add_file(path, field)
def __init__(self,params,parent): self.params=params self.parent=parent
def _custom_dtype_getter(self, # pylint: disable=keyword-arg-before-vararg getter, name, shape=None, dtype=DEFAULT_DTYPE, *args, **kwargs): """Creates va...
def __init__(self,params,parent): self.params=params self.parent=parent
def step_impl(context):
    """Resolve a "<name's id>" placeholder in the JSON payload to the real
    user id, then attach the payload to the pending request."""
    payload = json.loads(context.text)
    match = re.match(r"\<(\w+)'s id\>", payload['receiver'])
    receiver_name = match.group(1)
    payload['receiver'] = context.users[receiver_name]['id']
    context.request.add_data(payload)
def __init__(self,params,parent): self.params=params self.parent=parent
def _model_variable_scope(self): """Returns a variable scope that the model should be created under. If self.dtype is a castable type, model variable will be created in fp32 then cast to self.dtype before being used. Returns: A variable scope for the model. """ return tf.compat.v1.varia...
def __init__(self,params,parent): self.params=params self.parent=parent
def step_impl(context, state, code):
    """Send the pending request and assert the expected HTTP status code."""
    response = context.request.send()
    context.response = response
    # `.should` comes from the `sure` assertion library used by this suite.
    response.status_code.should.equal(int(code))
def __init__(self,params,parent): self.params=params self.parent=parent
def __call__(self, inputs, training): """Add operations to classify a batch of input images. Args: inputs: A Tensor representing a batch of input images. training: A boolean. Set to True to add operations required only when training the classifier. Returns: A logits Tensor with s...
def __init__(self,params,parent): self.params=params self.parent=parent
def step_impl(context, count):
    """Assert the JSON response body contains exactly `count` items."""
    actual = len(context.response.json())
    actual.should.equal(int(count))
def __init__(self,params,parent): self.params=params self.parent=parent
def noun_chunks(doclike: Union[Doc, Span]) -> Iterator[Span]: """ Detect base noun phrases from a dependency parse. Works on both Doc and Span. """ # fmt: off labels = ["nsubj", "nsubj:pass", "obj", "iobj", "ROOT", "appos", "nmod", "nmod:poss"] # fmt: on doc = doclike.doc # Ensure works on ...
def __init__(self,params,parent): self.params=params self.parent=parent
def __init__(self, leaf):
    """Create a tree node wrapping `leaf`, with no children yet."""
    self.leaf = leaf
    self.lchild = None
    self.rchild = None
def __init__(self,params,parent): self.params=params self.parent=parent
def get_leafs(self):
    """Return the leaves of this subtree, left-to-right.

    Fix: compare against None with `is None` (identity), not `== None`.
    """
    if self.lchild is None and self.rchild is None:
        return [self.leaf]
    return self.lchild.get_leafs() + self.rchild.get_leafs()
def __init__(self,params,parent): self.params=params self.parent=parent
def get_level(self, level, queue):
    """Collect the nodes `level - 1` steps below this node into `queue`.

    A fresh list is created when queue is None; the (possibly new) list
    is returned so callers can pass queue=None on the first call.

    Fix: Python lists have no `.push()` — the original raised
    AttributeError whenever level reached 1; use `.append()`.
    """
    if queue is None:
        queue = []
    if level == 1:
        queue.append(self)
    else:
        if self.lchild is not None:
            self.lchild.get_level(level - 1, queue)
        if self.rchild is not None:
            self.rchild.get_level(level - 1, queue)
    return queue
def __init__(self,params,parent): self.params=params self.parent=parent
def paint(self, c):
    """Draw this node's leaf, then recurse into any children."""
    self.leaf.paint(c)
    for child in (self.lchild, self.rchild):
        if child is not None:
            child.paint(c)
def __init__(self,params,parent): self.params=params self.parent=parent
def __init__(self, x, y, w, h):
    """Axis-aligned container at (x, y) of size w x h.

    Precomputes the centre point and its distance from the map centre.
    MAP_WIDTH / MAP_HEIGHT are module globals — presumably set by init()
    before any Container is built; verify against callers.
    """
    self.x = x
    self.y = y
    self.w = w
    self.h = h
    self.center = (x + int(w / 2), y + int(h / 2))
    dx = self.center[0] - MAP_WIDTH / 2
    dy = self.center[1] - MAP_HEIGHT / 2
    self.distance_from_center = sqrt(dx ** 2 + dy ** 2)
def __init__(self,params,parent): self.params=params self.parent=parent
def paint(self, c):
    """Outline this container's rectangle on canvas `c`."""
    c.stroke_rectangle(self.x, self.y, self.w, self.h)
def __init__(self,params,parent): self.params=params self.parent=parent
def draw_path(self, c, container):
    """Draw a corridor from this container's centre to `container`'s centre."""
    cx, cy = self.center
    ox, oy = container.center
    c.path(cx, cy, ox, oy)
def __init__(self,params,parent): self.params=params self.parent=parent
def __init__(self, w, h, color = "empty"):
    """Create an h x w drawing board (uint8) and select the initial brush."""
    self.w = w
    self.h = h
    self.board = zeros((h, w), dtype=uint8)
    self.set_brush(color)
def __init__(self,params,parent): self.params=params self.parent=parent
def set_brush(self, code):
    """Select the active colour from the brush palette by name."""
    colour = self.brushes[code]
    self.color = colour
def __init__(self,params,parent): self.params=params self.parent=parent
def stroke_rectangle(self, x, y, w, h):
    """Draw the 1-px outline of a w x h rectangle, top-left at (x, y)."""
    self.line(x, y, w, True)           # top edge
    self.line(x, y + h - 1, w, True)   # bottom edge
    self.line(x, y, h, False)          # left edge
    self.line(x + w - 1, y, h, False)  # right edge
def __init__(self,params,parent): self.params=params self.parent=parent
def filled_rectangle(self, x, y, w, h):
    """Fill a w x h block with top-left at (x, y) using the brush colour.

    Board is indexed [row, col], i.e. [y, x].
    """
    self.board[y:y + h, x:x + w] = self.color
def __init__(self,params,parent): self.params=params self.parent=parent
def line(self, x, y, length, horizontal):
    """Paint a 1-px line of `length` cells starting at (x, y).

    Horizontal lines extend along x; vertical lines extend along y.
    """
    if horizontal:
        self.board[y, x:x + length] = self.color
    else:
        self.board[y:y + length, x] = self.color
def __init__(self,params,parent): self.params=params self.parent=parent
def path(self, x1, y1, x2, y2):
    """Fill the axis-aligned rectangle spanned by (x1, y1)-(x2, y2), inclusive.

    NOTE(review): assumes x1 <= x2 and y1 <= y2; with reversed endpoints
    the slice is empty and nothing is drawn — confirm against callers.
    """
    self.board[y1:y2 + 1, x1:x2 + 1] = self.color
def __init__(self,params,parent): self.params=params self.parent=parent
def circle(self, x, y, r):
    """Fill a disc of radius r centred at (x, y) with the brush colour.

    Fix: index the board as [row, col] = [y, x], consistent with line()
    and filled_rectangle(); the original wrote board[x.., y..], which
    transposed (or crashed on) discs away from the main diagonal.
    """
    for x_offset in range(-r, r + 1):
        for y_offset in range(-r, r + 1):
            if sqrt(x_offset ** 2 + y_offset ** 2) < r:
                self.board[y + y_offset, x + x_offset] = self.color
def __init__(self,params,parent): self.params=params self.parent=parent
def draw(self):
    """Render the board array to the map image file (MAP_NAME)."""
    Image.fromarray(self.board).save(MAP_NAME)
def __init__(self,params,parent): self.params=params self.parent=parent
def __str__(self):
    """Human-readable dump of the underlying board array."""
    return str(self.board)
def __init__(self,params,parent): self.params=params self.parent=parent
def __init__(self, container): self.x = container.x+randint(1, floor(container.w/3)) self.y = container.y+randint(1, floor(container.h/3)) self.w = container.w-(self.x-container.x) self.h = container.h-(self.y-container.y) self.w -= randint(0,floor(self.w/3)) self.h -= randint(0,floor(self.w/3)) self.envi...
def __init__(self,params,parent): self.params=params self.parent=parent
def paint(self, c):
    """Fill this room's rectangle on canvas `c`."""
    c.filled_rectangle(self.x, self.y, self.w, self.h)
def __init__(self,params,parent): self.params=params self.parent=parent
def _split_vertical(container):
    """Split `container` into two side-by-side containers at a random x.

    Returns None when the container is too narrow to hold two halves of
    the minimum width (derived from W_RATIO and the container height).
    """
    min_w = int(W_RATIO * container.h) + 1
    if container.w < 2 * min_w:
        return None
    left_w = randint(min_w, container.w - min_w)
    left = Container(container.x, container.y, left_w, container.h)
    right = Container(container.x + left_w, container.y,
                      container.w - left_w, container.h)
    return [left, right]
def __init__(self,params,parent): self.params=params self.parent=parent
def _split_horizontal(container):
    """Split `container` into two stacked containers at a random y.

    Returns None when the container is too short to hold two halves of
    the minimum height (derived from H_RATIO and the container width).
    """
    min_h = int(H_RATIO * container.w) + 1
    if container.h < 2 * min_h:
        return None
    top_h = randint(min_h, container.h - min_h)
    top = Container(container.x, container.y, container.w, top_h)
    bottom = Container(container.x, container.y + top_h,
                       container.w, container.h - top_h)
    return [top, bottom]
def __init__(self,params,parent): self.params=params self.parent=parent
def split_container(container, iter):
    """Recursively split `container` up to `iter` times into a binary tree.

    Recursion stops when the budget runs out or random_split declines
    (returns None) because the container is too small.
    """
    root = Tree(container)
    if iter != 0:
        halves = random_split(container)
        if halves is not None:
            root.lchild = split_container(halves[0], iter - 1)
            root.rchild = split_container(halves[1], iter - 1)
    return root
def __init__(self,params,parent): self.params=params self.parent=parent
def draw_paths(c, tree):
    """Draw corridors between sibling leaves of `tree`, depth-first."""
    if tree.lchild is None or tree.rchild is None:
        return
    tree.lchild.leaf.draw_path(c, tree.rchild.leaf)
    draw_paths(c, tree.lchild)
    draw_paths(c, tree.rchild)
def __init__(self,params,parent): self.params=params self.parent=parent
def init(num_players): global MAP_WIDTH,MAP_HEIGHT,N_ITERATIONS,H_RATIO,W_RATIO,MIN_ROOM_SIDE,CENTER_HUB_HOLE,CENTER_HUB_RADIO,MAP_NAME MAP_WIDTH=int(500*sqrt(num_players)) MAP_HEIGHT=MAP_WIDTH N_ITERATIONS=log(MAP_WIDTH*100,2) H_RATIO=0.49 W_RATIO=H_RATIO MIN_ROOM_SIDE = 32 CENTER_HUB_HOLE = 32 CENTER_HUB_RAD...
def __init__(self,params,parent): self.params=params self.parent=parent
def get_attn(attn_type): if isinstance(attn_type, torch.nn.Module): return attn_type module_cls = None if attn_type is not None: if isinstance(attn_type, str): attn_type = attn_type.lower() # Lightweight attention modules (channel and/or coarse spatial). #...
def __init__(self,params,parent): self.params=params self.parent=parent
def main(): argument_spec = ec2_argument_spec() argument_spec.update(dict( region = dict(required=True, aliases = ['aws_region', 'ec2_region']), owner = dict(required=False, default=None), ami_id = dict(required=False), ami_tags = dict(required=Fal...
def __init__(self,params,parent): self.params=params self.parent=parent
def start(self, action_name: str) -> None:
    """Hook: begin recording the named action (no-op in the base class)."""
def __init__(self,params,parent): self.params=params self.parent=parent
def install_secret_key(app, filename='secret_key'): """Configure the SECRET_KEY from a file in the instance directory. If the file does not exist, print instructions to create it from a shell with a random key, then exit. """ filename = os.path.join(app.instance_path, filename) try: ...
def __init__(self,params,parent): self.params=params self.parent=parent
def stop(self, action_name: str) -> None:
    """Hook: record the named action's duration (no-op in the base class)."""
def __init__(self,params,parent): self.params=params self.parent=parent
def not_found(error):
    """Render the 404 page for unknown routes."""
    body = render_template('404.html')
    return body, 404
def __init__(self,params,parent): self.params=params self.parent=parent
def summary(self) -> str:
    """Hook: build the textual profiler report (no-op in the base class)."""
def __init__(self,params,parent): self.params=params self.parent=parent
def after_request(response):
    """Attach the test marker and permissive CORS headers to every response."""
    headers = response.headers
    headers.add('X-Test', 'This is only test.')
    # TODO: set to real origin
    headers.add('Access-Control-Allow-Origin', '*')
    return response
def __init__(self,params,parent): self.params=params self.parent=parent
def setup(self, **kwargs: Any) -> None: """Execute arbitrary pre-profiling set-up steps as defined by subclass."""
def __init__(self,params,parent): self.params=params self.parent=parent
def teardown(self, **kwargs: Any) -> None: """Execute arbitrary post-profiling tear-down steps as defined by subclass."""
def __init__(self,params,parent): self.params=params self.parent=parent
def __init__( self, dirpath: Optional[Union[str, Path]] = None, filename: Optional[str] = None, ) -> None: self.dirpath = dirpath self.filename = filename self._output_file: Optional[TextIO] = None self._write_stream: Optional[Callable] = None self._l...
def __init__(self,params,parent): self.params=params self.parent=parent
def profile(self, action_name: str) -> Generator: """ Yields a context manager to encapsulate the scope of a profiled action. Example:: with self.profile('load training data'): # load training data code The profiler will start once you've entered the contex...
def __init__(self,params,parent): self.params=params self.parent=parent
def profile_iterable(self, iterable: Iterable, action_name: str) -> Generator: iterator = iter(iterable) while True: try: self.start(action_name) value = next(iterator) self.stop(action_name) yield value except StopI...
def __init__(self,params,parent): self.params=params self.parent=parent
def _rank_zero_info(self, *args, **kwargs) -> None:
    """Forward to log.info only on the rank-0 (or rank-unset) process."""
    if self._local_rank in (None, 0):
        log.info(*args, **kwargs)
def __init__(self,params,parent): self.params=params self.parent=parent
def _prepare_filename( self, action_name: Optional[str] = None, extension: str = ".txt", split_token: str = "-" ) -> str: args = [] if self._stage is not None: args.append(self._stage) if self.filename: args.append(self.filename) if self._local_rank is...
def __init__(self,params,parent): self.params=params self.parent=parent
def _prepare_streams(self) -> None: if self._write_stream is not None: return if self.filename: filepath = os.path.join(self.dirpath, self._prepare_filename()) fs = get_filesystem(filepath) file = fs.open(filepath, "a") self._output_file = file...
def __init__(self,params,parent): self.params=params self.parent=parent
def describe(self) -> None: """Logs a profile report after the conclusion of run.""" # there are pickling issues with open file handles in Python 3.6 # so to avoid them, we open and close the files within this function # by calling `_prepare_streams` and `teardown` self._prepare_...
def __init__(self,params,parent): self.params=params self.parent=parent
def _stats_to_str(self, stats: Dict[str, str]) -> str: stage = f"{self._stage.upper()} " if self._stage is not None else "" output = [stage + "Profiler Report"] for action, value in stats.items(): header = f"Profile stats for: {action}" if self._local_rank is not None: ...
def __init__(self,params,parent): self.params=params self.parent=parent
def setup( self, stage: Optional[str] = None, local_rank: Optional[int] = None, log_dir: Optional[str] = None ) -> None: """Execute arbitrary pre-profiling set-up steps.""" self._stage = stage self._local_rank = local_rank self._log_dir = log_dir self.dirpath = self.d...
def __init__(self,params,parent): self.params=params self.parent=parent
def teardown(self, stage: Optional[str] = None) -> None: """ Execute arbitrary post-profiling tear-down steps. Closes the currently open file and stream. """ self._write_stream = None if self._output_file is not None: self._output_file.close() sel...
def __init__(self,params,parent): self.params=params self.parent=parent
def __del__(self) -> None:
    """Release file handles on garbage collection via teardown()."""
    self.teardown(stage=self._stage)
def __init__(self,params,parent): self.params=params self.parent=parent
def start(self, action_name: str) -> None:
    """Abstract: subclasses must implement action start."""
    raise NotImplementedError
def __init__(self,params,parent): self.params=params self.parent=parent
def stop(self, action_name: str) -> None:
    """Abstract: subclasses must implement action stop."""
    raise NotImplementedError
def __init__(self,params,parent): self.params=params self.parent=parent
def summary(self) -> str:
    """Abstract: subclasses must implement the report summary."""
    raise NotImplementedError
def __init__(self,params,parent): self.params=params self.parent=parent
def local_rank(self) -> int:
    """Local rank of this process; defaults to 0 when unset."""
    if self._local_rank is None:
        return 0
    return self._local_rank
def __init__(self,params,parent): self.params=params self.parent=parent
def start(self, action_name: str) -> None:
    """Pass-through profiler: starting an action records nothing."""
    pass
def __init__(self,params,parent): self.params=params self.parent=parent
def stop(self, action_name: str) -> None:
    """Pass-through profiler: stopping an action records nothing."""
    pass
def __init__(self,params,parent): self.params=params self.parent=parent
def __init__(self, model, data): # try and import pytorch global torch if torch is None: import torch if version.parse(torch.__version__) < version.parse("0.4"): warnings.warn("Your PyTorch version is older than 0.4 and not supported.") # check if...
def __init__(self,params,parent): self.params=params self.parent=parent
def __init__(self, reddit, term, config, oauth, url=None, submission=None): super(SubmissionPage, self).__init__(reddit, term, config, oauth) self.controller = SubmissionController(self, keymap=config.keymap) if url: self.content = SubmissionContent.from_url( reddit...
def __init__(self,params,parent): self.params=params self.parent=parent
def add_target_handle(self, layer):
    """Register get_target_input as a forward hook on `layer` and keep the handle."""
    self.target_handle = layer.register_forward_hook(get_target_input)
def __init__(self,params,parent): self.params=params self.parent=parent
def toggle_comment(self): "Toggle the selected comment tree between visible and hidden" current_index = self.nav.absolute_index self.content.toggle(current_index) # This logic handles a display edge case after a comment toggle. We # want to make sure that when we re-draw the pa...
def __init__(self,params,parent): self.params=params self.parent=parent
def add_handles(self, model, forward_handle, backward_handle): """ Add handles to all non-container layers in the model. Recursively for non-container layers """ handles_list = [] model_children = list(model.children()) if model_children: for child in ...
def __init__(self,params,parent): self.params=params self.parent=parent
def exit_submission(self):
    """Close the submission view and return to the subreddit page."""
    self.active = False
def __init__(self,params,parent): self.params=params self.parent=parent
def remove_attributes(self, model): """ Removes the x and y attributes which were added by the forward handles Recursively searches for non-container layers """ for child in model.children(): if 'nn.modules.container' in str(type(child)): self.remove_a...
def __init__(self,params,parent): self.params=params self.parent=parent
def refresh_content(self, order=None, name=None): "Re-download comments and reset the page index" order = order or self.content.order url = name or self.content.name with self.term.loader('Refreshing page'): self.content = SubmissionContent.from_url( self.re...
def __init__(self,params,parent): self.params=params self.parent=parent
def gradient(self, idx, inputs): self.model.zero_grad() X = [x.requires_grad_() for x in inputs] outputs = self.model(*X) selected = [val for val in outputs[:, idx]] grads = [] if self.interim: interim_inputs = self.layer.target_input for idx, inpu...
def __init__(self,params,parent): self.params=params self.parent=parent
def prompt_subreddit(self): "Open a prompt to navigate to a different subreddit" name = self.term.prompt_input('Enter page: /') if name is not None: with self.term.loader('Loading page'): content = SubredditContent.from_name( self.reddit, name, se...
def __init__(self,params,parent): self.params=params self.parent=parent
def shap_values(self, X, ranked_outputs=None, output_rank_order="max", check_additivity=False): # X ~ self.model_input # X_data ~ self.data # check if we have multiple inputs if not self.multi_input: assert type(X) != list, "Expected a single tensor model input!" ...
def __init__(self,params,parent): self.params=params self.parent=parent
def open_link(self):
    """Open the selected item's permalink in the browser; flash when absent."""
    url = self.get_selected_item().get('permalink')
    if url:
        self.term.open_browser(url)
    else:
        self.term.flash()
def __init__(self,params,parent): self.params=params self.parent=parent
def deeplift_grad(module, grad_input, grad_output): """The backward hook which computes the deeplift gradient for an nn.Module """ # first, get the module type module_type = module.__class__.__name__ # first, check the module is supported if module_type in op_handler: if op_handler[m...
def __init__(self,params,parent): self.params=params self.parent=parent
def open_pager(self): "Open the selected item with the system's pager" data = self.get_selected_item() if data['type'] == 'Submission': text = '\n\n'.join((data['permalink'], data['text'])) self.term.open_pager(text) elif data['type'] == 'Comment': tex...
def __init__(self,params,parent): self.params=params self.parent=parent
def add_interim_values(module, input, output): """The forward hook used to save interim tensors, detached from the graph. Used to calculate the multipliers """ try: del module.x except AttributeError: pass try: del module.y except AttributeError: pass modu...
def __init__(self,params,parent): self.params=params self.parent=parent
def add_comment(self): """ Submit a reply to the selected item. Selected item: Submission - add a top level comment Comment - add a comment reply """ data = self.get_selected_item() if data['type'] == 'Submission': body = data['text']...
def __init__(self,params,parent): self.params=params self.parent=parent
def get_target_input(module, input, output):
    """Forward hook that saves the (graph-attached) input on the module.

    Any previously saved input is discarded first.
    """
    try:
        del module.target_input
    except AttributeError:
        pass
    module.target_input = input
def __init__(self,params,parent): self.params=params self.parent=parent
def delete_comment(self):
    """Delete the selected item if it is a comment; flash otherwise."""
    item = self.get_selected_item()
    if item['type'] == 'Comment':
        self.delete_item()
    else:
        self.term.flash()
def __init__(self,params,parent): self.params=params self.parent=parent
def deeplift_tensor_grad(grad):
    """Pop and return the most recently stored complex-module gradient.

    The incoming `grad` is ignored; the replacement gradient comes from
    the module-level `complex_module_gradients` stack.

    Improvement: read-last + `del`-last is exactly `list.pop()`.
    """
    return complex_module_gradients.pop()
def __init__(self,params,parent): self.params=params self.parent=parent
def comment_urlview(self):
    """Open the selected item's body/text/url in the urlviewer; flash if empty."""
    data = self.get_selected_item()
    content = data.get('body') or data.get('text') or data.get('url_full')
    if content:
        self.term.open_urlview(content)
    else:
        self.term.flash()
def __init__(self,params,parent): self.params=params self.parent=parent
def passthrough(module, grad_input, grad_output):
    """Backward hook that leaves the gradients untouched."""
    return None
def __init__(self,params,parent): self.params=params self.parent=parent
def _draw_item(self, win, data, inverted): if data['type'] == 'MoreComments': return self._draw_more_comments(win, data) elif data['type'] == 'HiddenComment': return self._draw_more_comments(win, data) elif data['type'] == 'Comment': return self._draw_comment...
def __init__(self,params,parent): self.params=params self.parent=parent
def maxpool(module, grad_input, grad_output): pool_to_unpool = { 'MaxPool1d': torch.nn.functional.max_unpool1d, 'MaxPool2d': torch.nn.functional.max_unpool2d, 'MaxPool3d': torch.nn.functional.max_unpool3d } pool_to_function = { 'MaxPool1d': torch.nn.functional.max_pool1d, ...
def __init__(self,params,parent): self.params=params self.parent=parent
def _draw_comment(self, win, data, inverted): n_rows, n_cols = win.getmaxyx() n_cols -= 1 # Handle the case where the window is not large enough to fit the text. valid_rows = range(0, n_rows) offset = 0 if not inverted else -(data['n_rows'] - n_rows) # If there isn't e...