query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
Report usage metrics for all active Crypto Express adapters of CPCs. In addition to the commandspecific options shown in this help text, the general options (see 'zhmc help') can also be specified right after the 'zhmc' command name.
def metrics_crypto(cmd_ctx, cpc, **options): cmd_ctx.execute_cmd(lambda: cmd_metrics_crypto(cmd_ctx, cpc, options))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def metrics_channel(cmd_ctx, cpc, **options):\n cmd_ctx.execute_cmd(lambda: cmd_metrics_channel(cmd_ctx, cpc, options))", "def metrics_adapter(cmd_ctx, cpc, adapter, **options):\n cmd_ctx.execute_cmd(\n lambda: cmd_metrics_adapter(cmd_ctx, cpc, adapter, options))", "def do_hostinfo(self, args):\n ...
[ "0.61710244", "0.5943986", "0.5742064", "0.5595102", "0.55200773", "0.54445076", "0.5411969", "0.53798133", "0.5354008", "0.535235", "0.5349474", "0.5332392", "0.53124905", "0.5226011", "0.51669437", "0.51390135", "0.5104674", "0.5103828", "0.50836706", "0.5078611", "0.507831...
0.6303562
0
Report usage metrics for all active Flash Express adapters of CPCs. In addition to the commandspecific options shown in this help text, the general options (see 'zhmc help') can also be specified right after the 'zhmc' command name.
def metrics_flash(cmd_ctx, cpc, **options): cmd_ctx.execute_cmd(lambda: cmd_metrics_flash(cmd_ctx, cpc, options))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def metrics_channel(cmd_ctx, cpc, **options):\n cmd_ctx.execute_cmd(lambda: cmd_metrics_channel(cmd_ctx, cpc, options))", "def metrics_adapter(cmd_ctx, cpc, adapter, **options):\n cmd_ctx.execute_cmd(\n lambda: cmd_metrics_adapter(cmd_ctx, cpc, adapter, options))", "def do_hostinfo(self, args):\n ...
[ "0.617125", "0.58856094", "0.5707514", "0.5675522", "0.5634532", "0.54365146", "0.53614813", "0.53592706", "0.53404266", "0.53303754", "0.5302817", "0.5274004", "0.5224067", "0.52236396", "0.52125716", "0.51809424", "0.51480365", "0.5107816", "0.5101575", "0.50771254", "0.506...
0.5578356
5
Report usage metrics for all active RoCE adapters of CPCs. In addition to the commandspecific options shown in this help text, the general options (see 'zhmc help') can also be specified right after the 'zhmc' command name.
def metrics_roce(cmd_ctx, cpc, **options): cmd_ctx.execute_cmd(lambda: cmd_metrics_roce(cmd_ctx, cpc, options))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def metrics_channel(cmd_ctx, cpc, **options):\n cmd_ctx.execute_cmd(lambda: cmd_metrics_channel(cmd_ctx, cpc, options))", "def metrics_crypto(cmd_ctx, cpc, **options):\n cmd_ctx.execute_cmd(lambda: cmd_metrics_crypto(cmd_ctx, cpc, options))", "def metrics_adapter(cmd_ctx, cpc, adapter, **options):\n c...
[ "0.6161586", "0.5805755", "0.5797474", "0.5729157", "0.563271", "0.55953175", "0.5529902", "0.55097723", "0.54637194", "0.54599464", "0.54326856", "0.5381607", "0.5343133", "0.5302566", "0.52546537", "0.52469516", "0.52173275", "0.5206837", "0.5200306", "0.51897854", "0.51727...
0.54437673
10
Report usage metrics for the ports of network adapters of CPCs in DPM mode. In addition to the commandspecific options shown in this help text, the general options (see 'zhmc help') can also be specified right after the 'zhmc' command name.
def metrics_networkport(cmd_ctx, cpc, adapter, **options): cmd_ctx.execute_cmd( lambda: cmd_metrics_networkport(cmd_ctx, cpc, adapter, options))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def metrics_channel(cmd_ctx, cpc, **options):\n cmd_ctx.execute_cmd(lambda: cmd_metrics_channel(cmd_ctx, cpc, options))", "def metrics_adapter(cmd_ctx, cpc, adapter, **options):\n cmd_ctx.execute_cmd(\n lambda: cmd_metrics_adapter(cmd_ctx, cpc, adapter, options))", "def do_hostinfo(self, args):\n ...
[ "0.6124976", "0.59950477", "0.5756093", "0.5687688", "0.55233353", "0.5309227", "0.52750784", "0.5252932", "0.5208929", "0.5170246", "0.5106128", "0.50764817", "0.50519806", "0.50233275", "0.5003854", "0.4984393", "0.4930946", "0.49051604", "0.49042228", "0.49029616", "0.4877...
0.70290184
0
Report usage metrics for the NICs of partitions of CPCs in DPM mode. In addition to the commandspecific options shown in this help text, the general options (see 'zhmc help') can also be specified right after the 'zhmc' command name.
def metrics_nic(cmd_ctx, cpc, partition, nic, **options): cmd_ctx.execute_cmd( lambda: cmd_metrics_nic(cmd_ctx, cpc, partition, nic, options))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def metrics_cpc(cmd_ctx, cpc, **options):\n cmd_ctx.execute_cmd(lambda: cmd_metrics_cpc(cmd_ctx, cpc, options))", "def metrics_networkport(cmd_ctx, cpc, adapter, **options):\n cmd_ctx.execute_cmd(\n lambda: cmd_metrics_networkport(cmd_ctx, cpc, adapter, options))", "def metrics_channel(cmd_ctx, cp...
[ "0.6029862", "0.59694433", "0.593867", "0.5872437", "0.57943094", "0.56738853", "0.5640134", "0.5486397", "0.5422634", "0.53095937", "0.5250618", "0.5230403", "0.510633", "0.5087089", "0.50808835", "0.5045866", "0.49836123", "0.4982057", "0.49766484", "0.4954867", "0.49396986...
0.6058172
0
A decorator that uses cProfile to profile a function
def profile(fnc): def inner(*args, **kwargs): pr = cProfile.Profile() pr.enable() retval = fnc(*args, **kwargs) pr.disable() s = io.StringIO() sortby = 'cumulative' ps = pstats.Stats(pr, stream=s).sort_stats(sortby) ps.print_stats() logging.info(s.getvalue()) return retval return inner
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def profile(func):\n def wrapper(*args, **kwargs):\n profile_filename = func.__name__ + '.prof'\n profiler = cProfile.Profile()\n result = profiler.runcall(func, *args, **kwargs)\n profiler.dump_stats(profile_filename)\n return result\n return wrapper", "def profile(func)...
[ "0.84530425", "0.8446885", "0.82060313", "0.81897914", "0.81137836", "0.805328", "0.7986637", "0.7973362", "0.77897024", "0.77074057", "0.77040017", "0.7657467", "0.7563646", "0.7490806", "0.74640775", "0.7423344", "0.7059046", "0.7050842", "0.6984666", "0.6982411", "0.693029...
0.783562
8
Number of days left.
def ssl_valid_time_remaining(domainname): expires = ssl_expiry_date(domainname) return expires - datetime.datetime.utcnow().date()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_number_days(self):\r\n return 1", "def elapsed_days(self) -> int:\n return (datetime.today() - self.release_datetime).days", "def remaining_days_in_current_period(self):\n try:\n return self.count_days_from_now(self.current_period_ends_at)\n except AttributeError:...
[ "0.7505091", "0.7279096", "0.7158447", "0.71100044", "0.7087657", "0.70665115", "0.7026382", "0.7017739", "0.6946899", "0.69389665", "0.6917387", "0.6910316", "0.6885656", "0.68470883", "0.6816302", "0.6804258", "0.6734145", "0.670508", "0.6679895", "0.6673183", "0.66385496",...
0.0
-1
Returns the truncated SHA521 hash of the message.
def get_hash(data, n): import hashlib message_hash = hashlib.sha512(data).digest() e = int.from_bytes(message_hash, 'big') # FIPS 180 says that when a hash needs to be truncated, the rightmost bits # should be discarded. z = e >> (e.bit_length() - n.bit_length()) assert z.bit_length() <= n.bit_length() return z
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _sha512(message):\n return hashlib.sha512(message).hexdigest()", "def _sha512(data):\r\n return hashlib.sha512(data).hexdigest()", "def sha512(s: str) -> str:\n return hashlib.sha512(s.encode()).hexdigest()", "def _hash(data):\n return hashlib.sha512(data).hexdigest()", "def get_sha512_...
[ "0.7193293", "0.6325299", "0.61913234", "0.5894766", "0.58219564", "0.57631546", "0.5762267", "0.5743988", "0.5704884", "0.56312925", "0.56283605", "0.56206113", "0.54951626", "0.54939073", "0.54397", "0.5402861", "0.539359", "0.5392251", "0.535287", "0.534708", "0.5297465", ...
0.5229747
24
Computes the Modulation SpectrumBased ECG Quality Index (MSQI) for one or many ECG signals defined in x, sampled with a sampling frequency fs
def msqi_ama(x, fs): # test ecg shape try: x.shape[1] except IndexError: x = x[:, np.newaxis] # Empirical values for the STFFT transformation win_size_sec = 0.125 #seconds win_over_sec = 0.09375 #seconds nfft_factor_1 = 16 nfft_factor_2 = 4 win_size_smp = int(win_size_sec * fs) #samples win_over_smp = int(win_over_sec * fs) #samples win_shft_smp = win_size_smp - win_over_smp # Computes Modulation Spectrogram modulation_spectrogram = ama.strfft_modulation_spectrogram(x, fs, win_size_smp, win_shft_smp, nfft_factor_1, 'cosine', nfft_factor_2, 'cosine' ) # Find fundamental frequency (HR) # f = (0, 40)Hz ix_f_00 = (np.abs(modulation_spectrogram['freq_axis'] - 0)).argmin(0) ix_f_40 = (np.abs(modulation_spectrogram['freq_axis'] - 40)).argmin(0) + 1 # Look for the maximum only from 0.6 to 3 Hz (36 to 180 bpm) valid_f_ix = np.logical_or(modulation_spectrogram['freq_mod_axis'] < 0.66 , modulation_spectrogram['freq_mod_axis'] > 3) # number of epochs n_epochs = modulation_spectrogram['power_modulation_spectrogram'].shape[2] msqi_vals = np.zeros(n_epochs) hr_vals = np.zeros(n_epochs) for ix_epoch in range(n_epochs): B = np.sqrt(modulation_spectrogram['power_modulation_spectrogram'][:, :, ix_epoch]) # Scale to maximun of B B = B / np.max(B) # Add B in the conventional frequency axis from 0 to 40 Hz tmp = np.sum(B[ix_f_00:ix_f_40, :], axis=0) # Look for the maximum only from 0.6 to 3 Hz (36 to 180 bpm) tmp[valid_f_ix] = 0 ix_max = np.argmax(tmp) freq_funda = modulation_spectrogram['freq_mod_axis'][ix_max] # TME tme = np.sum(B) eme = 0 for ix_harm in range(1, 5): ix_fm = (np.abs(modulation_spectrogram['freq_mod_axis'] - (ix_harm * freq_funda) )).argmin(0) ix_b = int(round(.3125 / modulation_spectrogram['freq_mod_delta'] )) # 0.3125Hz, half lobe # EME eme = eme + np.sum(B[ 0 : ix_f_40, ix_fm - ix_b : ix_fm + ix_b + 1 ]) # RME rme = tme - eme # MS-QI msqi_vals[ix_epoch] = eme / rme # HR hr_vals[ix_epoch] = freq_funda * 60 return (msqi_vals, hr_vals, modulation_spectrogram)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_optimal_Q(x, y, data, min_Q, max_Q, fs=1., added_kernel = None, plot_BIC=True):\n Qs= np.arange(min_Q, max_Q)\n BIC = np.zeros((Qs.shape[0]))\n \n for i, q in enumerate(Qs):\n sm = SpectralMixture(q, x=x.flatten(),y=y.flatten(),fs=fs)\n for k in sm.kernels:\n ...
[ "0.5476735", "0.5465266", "0.5431895", "0.5348803", "0.5319814", "0.5249597", "0.5233212", "0.52282983", "0.51446587", "0.51347893", "0.51147515", "0.5109146", "0.50809836", "0.50781864", "0.5074541", "0.50507593", "0.5049618", "0.5042971", "0.50428605", "0.5027192", "0.50248...
0.64019674
0
Helper function to convert raw lines into a minibatch as a DotDict.
def process_batch(self, lines): batch_edges = [] batch_edges_values = [] batch_edges_target = [] # Binary classification targets (0/1) batch_nodes = [] batch_nodes_target = [] # Multi-class classification targets (`num_nodes` classes) batch_nodes_coord = [] batch_mst_edges = [] batch_mst_len = [] for line_num, line in enumerate(lines): line = line.split(" ") # Split into list # Compute signal on nodes nodes = np.ones(self.num_nodes) # All 1s for TSP... # Convert node coordinates to required format nodes_coord = [] for idx in range(0, 2 * self.num_nodes, 2): nodes_coord.append([float(line[idx]), float(line[idx + 1])]) # Compute distance matrix W_val = squareform(pdist(nodes_coord, metric='euclidean')) # Compute adjacency matrix if self.num_neighbors == -1: W = np.ones((self.num_nodes, self.num_nodes)) # Graph is fully connected else: W = np.zeros((self.num_nodes, self.num_nodes)) # Determine k-nearest neighbors for each node knns = np.argpartition(W_val, kth=self.num_neighbors, axis=-1)[:, self.num_neighbors::-1] # Make connections for idx in range(self.num_nodes): W[idx][knns[idx]] = 1 np.fill_diagonal(W, 2) # Special token for self-connections # Convert tour nodes to required format # Don't add final connection for tour/cycle _edges = [edge for edge in line[line.index('output') + 1:]] mst_edges = [literal_eval(' '.join(_edges[i:i+2])) for i in range(0, len(_edges),2)] # gets the edges from line --> equivalent to tour_nodes for TSP # Compute node and edge representation of mst + mst_len mst_len = 0 # length of mst nodes_target = np.zeros(self.num_nodes) edges_target = np.zeros((self.num_nodes, self.num_nodes)) for idx in range(len(mst_edges)): i = mst_edges[idx][0] j = mst_edges[idx][1] nodes_target[i] = idx # node targets: ordering of nodes in tour edges_target[i][j] = 1 edges_target[j][i] = 1 mst_len += W_val[i][j] # Concatenate the data batch_edges.append(W) batch_edges_values.append(W_val) batch_edges_target.append(edges_target) batch_nodes.append(nodes) 
batch_nodes_target.append(nodes_target) batch_nodes_coord.append(nodes_coord) batch_mst_edges.append(mst_edges) batch_mst_len.append(mst_len) # From list to tensors as a DotDict batch = DotDict() batch.edges = np.stack(batch_edges, axis=0) batch.edges_values = np.stack(batch_edges_values, axis=0) batch.edges_target = np.stack(batch_edges_target, axis=0) batch.nodes = np.stack(batch_nodes, axis=0) batch.nodes_target = np.stack(batch_nodes_target, axis=0) batch.nodes_coord = np.stack(batch_nodes_coord, axis=0) batch.mst_edges = np.stack(batch_mst_edges, axis=0) batch.mst_len = np.stack(batch_mst_len, axis=0) return batch
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_lines_to_dict(lines):\n res = {k: v.strip() for k, v in (m.split(':', 1) for m in lines)}\n return res", "def dictFromLines(lines,sep=None):\n reComment = re.compile('#.*')\n temp = [reComment.sub('',x).strip() for x in lines.split('\\n')]\n if sep == None or type(sep) == type(''):\n ...
[ "0.5950162", "0.57444644", "0.5603637", "0.54143274", "0.5345499", "0.53329337", "0.5319959", "0.53030837", "0.52169234", "0.5213757", "0.5200185", "0.51813257", "0.5157761", "0.5150942", "0.5118812", "0.5090973", "0.50735104", "0.5071656", "0.5063901", "0.50526667", "0.50393...
0.0
-1
Load the image on initial load of the application
def OnInit(self): self.imageID = self.loadImage()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def OnInit( self ):\n self.imageID = self.loadImage ()", "def load_image(self, **kwargs):\n ...", "def load_image(self):\n # Minimal progress display while image is loaded.\n group = displayio.Group()\n group.append(centered_label('LOADING...', 40, 3))\n #self.rect = R...
[ "0.7676823", "0.72862685", "0.6990877", "0.69560045", "0.69560045", "0.69560045", "0.6736933", "0.66402304", "0.6628148", "0.65887284", "0.65709776", "0.6562708", "0.6558569", "0.6513278", "0.64529467", "0.6451759", "0.6443347", "0.64257944", "0.64138216", "0.6399771", "0.637...
0.77568126
0
Load an image file as a 2D texture using PIL
def loadImage(self, imageName="nehe_wall.bmp"): # PIL defines an "open" method which is Image specific! im = open(imageName) try: ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBA", 0, -1) except SystemError: ix, iy, image = im.size[0], im.size[1], im.tostring("raw", "RGBX", 0, -1) # Generate a texture ID ID = glGenTextures(1) # Make our new texture ID the current 2D texture glBindTexture(GL_TEXTURE_2D, ID) glPixelStorei(GL_UNPACK_ALIGNMENT, 1) # Copy the texture data into the current texture ID glTexImage2D( GL_TEXTURE_2D, 0, 3, ix, iy, 0, GL_RGBA, GL_UNSIGNED_BYTE, image ) """ Note that only the ID is returned, no reference to the image object or the string data is stored in user space, the data is only present within the GL after this call exits. """ return ID
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_texture(file_name, x=0, y=0, width=0, height=0, scale=1):\n\n # See if we already loaded this file, and we can just use a cached version.\n if file_name in load_texture.texture_cache:\n return load_texture.texture_cache[file_name]\n\n source_image = PIL.Image.open(file_name)\n\n source_...
[ "0.7242643", "0.71882534", "0.7155117", "0.7069824", "0.70061564", "0.70061564", "0.69536114", "0.68268985", "0.68221575", "0.6817364", "0.6731701", "0.6718896", "0.6691872", "0.6541551", "0.65246266", "0.6514948", "0.6454194", "0.6423829", "0.6403943", "0.63669056", "0.63510...
0.6440027
17
Rendertime texture environment setup
def setupTexture(self): # Configure the texture rendering parameters glEnable(GL_TEXTURE_2D) glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST) glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST) glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_DECAL) # Re-select our texture, could use other generated textures # if we had generated them earlier. glBindTexture(GL_TEXTURE_2D, self.imageID)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def set_environment_texture(self, texture, is_srgb=False):\n # cube_map textures cannot use spherical harmonics\n if texture.cube_map:\n self.AutomaticLightCreationOff()\n # disable spherical harmonics was added in 9.1.0\n if hasattr(self, 'UseSphericalHarmonicsOff'):...
[ "0.61697626", "0.6057704", "0.57795113", "0.5687284", "0.56449056", "0.55825335", "0.5524036", "0.5497377", "0.5459231", "0.5422027", "0.5419896", "0.5402457", "0.5394463", "0.5369188", "0.53376245", "0.5336224", "0.5335161", "0.5335161", "0.5328675", "0.5311985", "0.5309703"...
0.5953139
2
Draw a cube with texture coordinates
def drawCube(self): glBegin(GL_QUADS); glTexCoord2f(0.0, 0.0); glVertex3f(-1.0, -1.0, 1.0); glTexCoord2f(1.0, 0.0); glVertex3f(1.0, -1.0, 1.0); glTexCoord2f(1.0, 1.0); glVertex3f(1.0, 1.0, 1.0); glTexCoord2f(0.0, 1.0); glVertex3f(-1.0, 1.0, 1.0); glTexCoord2f(1.0, 0.0); glVertex3f(-1.0, -1.0, -1.0); glTexCoord2f(1.0, 1.0); glVertex3f(-1.0, 1.0, -1.0); glTexCoord2f(0.0, 1.0); glVertex3f(1.0, 1.0, -1.0); glTexCoord2f(0.0, 0.0); glVertex3f(1.0, -1.0, -1.0); glTexCoord2f(0.0, 1.0); glVertex3f(-1.0, 1.0, -1.0); glTexCoord2f(0.0, 0.0); glVertex3f(-1.0, 1.0, 1.0); glTexCoord2f(1.0, 0.0); glVertex3f(1.0, 1.0, 1.0); glTexCoord2f(1.0, 1.0); glVertex3f(1.0, 1.0, -1.0); glTexCoord2f(1.0, 1.0); glVertex3f(-1.0, -1.0, -1.0); glTexCoord2f(0.0, 1.0); glVertex3f(1.0, -1.0, -1.0); glTexCoord2f(0.0, 0.0); glVertex3f(1.0, -1.0, 1.0); glTexCoord2f(1.0, 0.0); glVertex3f(-1.0, -1.0, 1.0); glTexCoord2f(1.0, 0.0); glVertex3f(1.0, -1.0, -1.0); glTexCoord2f(1.0, 1.0); glVertex3f(1.0, 1.0, -1.0); glTexCoord2f(0.0, 1.0); glVertex3f(1.0, 1.0, 1.0); glTexCoord2f(0.0, 0.0); glVertex3f(1.0, -1.0, 1.0); glTexCoord2f(0.0, 0.0); glVertex3f(-1.0, -1.0, -1.0); glTexCoord2f(1.0, 0.0); glVertex3f(-1.0, -1.0, 1.0); glTexCoord2f(1.0, 1.0); glVertex3f(-1.0, 1.0, 1.0); glTexCoord2f(0.0, 1.0); glVertex3f(-1.0, 1.0, -1.0); glEnd()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def drawCube( self ):\n glBegin(GL_QUADS);\n glTexCoord2f(0.0, 0.0); glVertex3f(-1.0, -1.0, 1.0);\n glTexCoord2f(1.0, 0.0); glVertex3f( 1.0, -1.0, 1.0);\n glTexCoord2f(1.0, 1.0); glVertex3f( 1.0, 1.0, 1.0);\n glTexCoord2f(0.0, 1.0); glVertex3f(-1.0, 1.0, 1.0);\n glTe...
[ "0.8082414", "0.790963", "0.73804325", "0.7115595", "0.7054056", "0.70481575", "0.6607423", "0.6532412", "0.64749116", "0.6449474", "0.6229988", "0.6222093", "0.6187685", "0.61688286", "0.6162942", "0.6152132", "0.6028768", "0.59879607", "0.5984257", "0.59456533", "0.5933637"...
0.80746436
1
Request refresh of the context whenever idle
def OnIdle(self, ): self.triggerRedraw(1) return 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def idle(self):\n return", "def on_refresh(self):\n pass", "def idle():", "def _idle(self):\n # self._purge_timedout()\n # ...", "def refresh(self):\n pass", "def refresh(self):\n pass", "def refresh(self):\n\n self._refreshed_on = time.time() * 1000", ...
[ "0.6957455", "0.6568586", "0.65202016", "0.64047533", "0.6379513", "0.6379513", "0.63642436", "0.6357043", "0.6318813", "0.6293459", "0.6293459", "0.6293459", "0.6279487", "0.6272733", "0.6181421", "0.6174326", "0.61006075", "0.60482174", "0.60439646", "0.60439646", "0.604372...
0.5446337
81
Continually read and add messages to the chat.
def _receive_message_loop(self): while True: try: message = self.connection_socket.recv(4096) if len(message) > 0: self.add_message_to_chat(message.decode('utf-8')) sleep(0.2) except ConnectionResetError: # messagebox.showerror("Client dropped", "The other person has dropped from the connection.") self.root.destroy()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def handle_chat(self):\n while True:\n if self.chat_breakout:\n return\n\n time.sleep(1)\n messages = \"\"\n for i in range(5):\n try:\n messages += f\"{self.queue.popleft()}\\n\"\n except IndexError:...
[ "0.69138986", "0.6706282", "0.6597729", "0.65439147", "0.64965916", "0.64251614", "0.6425156", "0.64248693", "0.64159244", "0.6353749", "0.63389343", "0.63240904", "0.6288118", "0.6244579", "0.6240275", "0.621565", "0.61916894", "0.61845356", "0.6172189", "0.610745", "0.61024...
0.5676125
72
Sends the message over the socket and also adds it to the chat.
def _send_message(self, e: Event): message = self.message_text.get("1.0", 'end-1c').replace('\n', "") if len(message) > 0: self.add_message_to_chat('you: ' + message) self._clear_message_text() self.connection_socket.send(bytes('them: ' + message, 'utf-8'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sendChatMessage(self, msg):\n self.transport.write(msg)", "def send_message(self, message):\n \n msgPacket = serverbound.play.ChatPacket()\n msgPacket.message = message\n self.connection.write_packet(msgPacket)", "def send_message(self, message):\r\n\t\tself.__tcpSocket.w...
[ "0.8096262", "0.7986446", "0.781855", "0.77682155", "0.7713088", "0.7641384", "0.7619159", "0.75668764", "0.75613767", "0.75334066", "0.7495466", "0.74901545", "0.74558794", "0.74528867", "0.73387706", "0.7315095", "0.72622466", "0.7216694", "0.7187263", "0.7170448", "0.71642...
0.70444596
34
Adds a message to the chat and scrolls down.
def add_message_to_chat(self, message: str): scroll_length = (len(message) // Client.TEXTBOX_CHARACTER_LENGTH) + 1 self.chat_text.config(state=NORMAL) self.chat_text.insert(END, message + '\n') self.chat_text.yview_scroll(scroll_length, "units") self.chat_text.config(state=DISABLED)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_add_chat_message(self, chat_message):\n self.chat_messages.append(chat_message)\n\n #logging.info(\"adding message: %s\" % chat_message.message)\n\n if len(self.chat_messages) > ENVIRONMENT['BUFFER_SIZE']:\n self.chat_messages.pop(0)\n\n # alert our polling clients\n...
[ "0.6858262", "0.68089944", "0.6778456", "0.67170316", "0.66270477", "0.65076065", "0.6480786", "0.6332747", "0.6320004", "0.6282979", "0.6253987", "0.6231954", "0.6195", "0.6192585", "0.61682373", "0.61682373", "0.6153406", "0.61134017", "0.6108938", "0.60916406", "0.60653615...
0.799143
0
Creates a data folder containing a 100class subset of ImageNet, then creates a zipped copy of it
def zip_imagenet100c(): #First make sure the directory we are given is correct! if not os.path.isdir(DATA_SRC_ROOT): raise Exception("Bad filepath given") #create the destiantion directories if they don't exist if not os.path.isdir(IMAGENET100_DIR): os.mkdir(IMAGENET100_DIR) #grab the subset wnids for the 100 class-subset with open(IMAGENET100_CLASSES) as f: subset_wnids = f.readlines() subset_wnids = [x.strip() for x in subset_wnids] #list of the 100 WNIDs we grab #Grab the names of all of the folders inside the root data source #Structure is distortion/sub_distortion/level/wnids for distortion in os.listdir(DATA_SRC_ROOT): if distortion != "meta.bin": print(distortion) folder_path = os.path.join(DATA_SRC_ROOT, distortion) if not os.path.isdir(folder_path): continue for sub_distortion in os.listdir(folder_path): print(sub_distortion) subfolder_path = os.path.join(folder_path, sub_distortion) if not os.path.isdir(subfolder_path): continue for level in os.listdir(subfolder_path): print(level) level_path = os.path.join(subfolder_path, level) #grab the correcrt validation d9recotires for wnid in os.listdir(level_path): wnid_path = os.path.join(level_path, wnid) if not os.path.isdir(wnid_path): continue if wnid in subset_wnids: dest_path = os.path.join(IMAGENET100_DIR, distortion, sub_distortion, level, wnid) shutil.copytree(wnid_path, dest_path) #copy the metadata bin file meta_file = os.path.join(DATA_SRC_ROOT, 'meta.bin') meta_dest = os.path.join(IMAGENET100_DIR, 'meta.bin') shutil.copy(meta_file, meta_dest) #Zip the destinatio file shutil.make_archive(ZIP_PATH + '/ImageNet100C', 'tar', IMAGENET100_DIR)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_dataset(data_folder: str, dataset_file: str, targets_file: str = os.path.join('data', 'targets.pkl')):\n files = sorted(glob.glob(os.path.join(data_folder, '**/*.jpg'), recursive=True))\n images = []\n crop_sizes = []\n crop_centers = []\n targets = []\n for image in tqdm(files, desc='...
[ "0.6529704", "0.6481513", "0.6435279", "0.6418067", "0.63397163", "0.6227401", "0.62035155", "0.61983734", "0.6165103", "0.61225355", "0.612228", "0.60964", "0.60848254", "0.6073827", "0.60563177", "0.6047265", "0.60234", "0.6019958", "0.60174394", "0.6004536", "0.5983722", ...
0.7347322
0
Given a wav file, use Praat to return a dictionary containing pitch (in Hz) at each millisecond.
def praat_analyze_pitch(audio_file): praatpath = path.abspath('Praat.app/Contents/MacOS/Praat') # locate Praat executable pl = PraatLoader(praatpath=praatpath) # create instance of PraatLoader object praat_output = pl.run_script('pitch.praat', audio_file) # run pitch script in Praat pitch_data = pl.read_praat_out(praat_output) # turn Praat's output into Python dict return pitch_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_pitch_from_sound_file(filename: str, samplerate: int = DEFAULT_SAMPLE_RATE):\n if os.path.isfile(filename) is False:\n raise Exception('File not found with filename = %s' % filename)\n\n print(\"====> reading pitch from sound file\")\n win_s = 4096 // DOWN_SAMPLE # fft size\n hop_s = 5...
[ "0.70273054", "0.70135546", "0.68500084", "0.6821027", "0.6812894", "0.6796246", "0.65991676", "0.6327907", "0.6325302", "0.62763965", "0.62052196", "0.61778903", "0.60919714", "0.60821027", "0.60718834", "0.60426664", "0.59239006", "0.59204537", "0.5904547", "0.58931905", "0...
0.6584172
7
Clean up the dictionary returned by praatinterface, put it in the format needed for graphing, smooth data by reducing number of datapoints, and return it as JSON.
def format_pitch_data(pd): for t in pd.keys(): pd[t] = pd[t]['Pitch'] # make each value just the pitch, instead of a sub-dict if pd[t] == 0: del pd[t] # if pitch is 0, remove from dictionary # now, pd is dict where each key is time (x value) and each value is pitch (y value) # to format for graph input, make list of dicts containing x-y pairs datapoints_list = [] for t in pd.keys(): datapoint = {} datapoint["x"] = t datapoint["y"] = pd[t] datapoints_list.append(datapoint) # sort the list by the value of "x" datapoints_sorted = sorted(datapoints_list, key=itemgetter("x")) # with this sorted list, do some data smoothing # pull out every nth item i = 0 datapoints_keep = [] while i < len(datapoints_sorted): datapoints_keep.append(datapoints_sorted[i]) i += 50 # make sure last item is included so length of curve isn't lost datapoints_keep.append(datapoints_sorted[-1]) # print "num of datapoints:", len(datapoints_keep) # print datapoints_keep[:100] return json.dumps(datapoints_keep, sort_keys=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_json(self):\n data = {}\n data['ip'] = self.ip\n\n try:\n data['country'] = self.processedvtdata[\"country\"]\n except KeyError:\n data['country'] = 'None'\n try:\n data['as'] = self.processedvtdata[\"as_owner\"]\n except KeyError:\...
[ "0.65398073", "0.63518214", "0.6286521", "0.61414844", "0.6091616", "0.6031922", "0.6015514", "0.6000563", "0.59230435", "0.59183973", "0.590805", "0.5886959", "0.5885497", "0.5877358", "0.5861081", "0.58608174", "0.58516765", "0.58342355", "0.57904774", "0.5777289", "0.57518...
0.5780186
19
Show the popup and return True if accepted, False if canceled.
def popup(self): return self.exec_() == QDialog.Accepted
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def verify_popup(self, type):", "def _show_popup(self) -> None:\n\n top = tk.Toplevel()\n email_list_len = len(self.get_recipients())\n msg = tk.messagebox.askquestion('Confirm send emails', 'Are you sure you want to email {} client{}?'\n .format(email_...
[ "0.6642623", "0.6464855", "0.63802284", "0.62683916", "0.6240332", "0.6218975", "0.6172441", "0.61592615", "0.6059267", "0.5996099", "0.5937343", "0.5933337", "0.5915153", "0.5883959", "0.5883959", "0.5883959", "0.58716357", "0.58673215", "0.5851311", "0.5851311", "0.5847941"...
0.79470927
0
Fill the heavy metal unit labels with the selected unit.
def set_hm_unit_display(self): units = str(self.entries['units'].combobox.currentText()) self.ui.is_unitL1.setText(units) self.ui.is_unitL2.setText(units) self.ui.is_unitL3.setText(units) self.ui.is_unitL4.setText(units)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unit_label(self, unit_label):\n\n self._unit_label = unit_label", "def unit_label(self, unit_label):\n\n self._unit_label = unit_label", "def unit_label(self, unit_label):\n\n self._unit_label = unit_label", "def update_units(self):\n unit_var_value = self.view.vars['unit'].ge...
[ "0.65493584", "0.65493584", "0.65493584", "0.61209", "0.5876205", "0.5848301", "0.5719211", "0.56192005", "0.56159955", "0.5609573", "0.55953413", "0.55090725", "0.5500092", "0.54777503", "0.54361576", "0.5392763", "0.5386514", "0.53529835", "0.5319486", "0.53029513", "0.5297...
0.6863121
0
Seek to the given time.
def seek_to(self, ms): self.proxy.seek_to(ms)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def seek(self, time):\n command = 'seek ' + str(time)\n self.run_command(command)", "def seek(self, time: int):\n self._select_interface(self._rc_seek, self._http_seek, time)", "def _seek(self, time_offset):\n if (time.time() - (self.time_start + self.time_offset)) < 0.1:\n log.i...
[ "0.7862459", "0.73034114", "0.69887996", "0.6565575", "0.64587533", "0.6248209", "0.5942373", "0.58812505", "0.5810669", "0.5806339", "0.5791968", "0.577806", "0.577806", "0.57382905", "0.5731552", "0.57055503", "0.56437117", "0.5611064", "0.55878776", "0.5579256", "0.5579256...
0.65188617
4
Create the task on the server
def create(self, server): if len(self.geometries) == 0: raise Exception('no geometries') return server.post( 'task_admin', self.as_payload(), replacements={ 'slug': self.__challenge__.slug, 'identifier': self.identifier})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_task():", "def create_task(self, name, value):\n pass", "def create_task():\n client = RequestManager()\n client.set_method(\"POST\")\n client.set_endpoint(\"/projects/{0}/stories/{1}/tasks\".format(STORED_ID['project_id'], STORED_ID['story_id']))\n name = \"\".joi...
[ "0.8675896", "0.75931495", "0.7256835", "0.71343005", "0.7059578", "0.70504224", "0.70264506", "0.7024704", "0.69823843", "0.69811654", "0.695801", "0.69416815", "0.68903977", "0.68586636", "0.6837721", "0.6830297", "0.6803687", "0.67863655", "0.67849886", "0.6761924", "0.674...
0.62833476
64
Update existing task on the server
def update(self, server): return server.put( 'task_admin', self.as_payload(), replacements={ 'slug': self.__challenge__.slug, 'identifier': self.identifier})
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_task(self, task):\n create = task.id == 0\n\n xml = self._serialise_task(task)\n\n method = ['PUT','POST'][create]\n\n if create:\n url = \"%s/tasks?%s\" % \\\n (self._get_base_url(), self._get_url_params())\n else:\n url = \"%s/t...
[ "0.8158114", "0.8152909", "0.76661485", "0.7663534", "0.7616725", "0.7550435", "0.7534657", "0.7525693", "0.74830467", "0.7400476", "0.73755723", "0.7304954", "0.7256344", "0.7224427", "0.7218338", "0.7215386", "0.7203241", "0.718519", "0.7181675", "0.7085396", "0.70829463", ...
0.6991943
24
Check if a task exists on the server
def exists(self, server): try: server.get( 'task', replacements={ 'slug': self.__challenge__.slug, 'identifier': self.identifier}) except Exception: return False return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __check_if_task_exists(self, server_id):\n if server_id in self.__migrating_tasks.keys():\n return True\n return False", "def exists_task(self, task):\n assert task, \"Must input a valid task name.\"\n return any(self.get_by_task(task))", "def isTasksExists(request):...
[ "0.77683973", "0.7662436", "0.74845606", "0.73977894", "0.7334475", "0.68549156", "0.68336856", "0.6772", "0.67269367", "0.6711562", "0.67005736", "0.6666161", "0.6534565", "0.64875007", "0.6391127", "0.6377171", "0.6374866", "0.63482434", "0.63294256", "0.6309257", "0.629597...
0.8097337
0
Retrieve a task from the server
def from_server(cls, server, slug, identifier): task = server.get( 'task', replacements={ 'slug': slug, 'identifier': identifier}) return cls(**task)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_task(self):\n\n url='{url}/task'.format(url=config.SERVER_URL)\n\n try:\n res=request.urlopen(url,timeout=10).read()\n res=str(res,encoding='utf8')\n except Exception as e:\n check_server() # sleep until server is available\n try:\n ...
[ "0.79089105", "0.77541333", "0.7590145", "0.73984325", "0.7325193", "0.73121446", "0.72696733", "0.7252774", "0.7249014", "0.72433484", "0.7215614", "0.7204756", "0.71764857", "0.7148466", "0.7100719", "0.7094087", "0.70633936", "0.7023238", "0.69586605", "0.6954439", "0.6948...
0.0
-1
Create a task from JSON
def from_payload(cls, payload): return cls(**payload)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_json(json):\n\n if \"description\" not in json or \"tags\" not in json or \"files\" not in json:\n raise InvalidJSONException(\"Task must have a description, a list of tags and a list of files\")\n\n if type(json[\"description\"]) != str:\n raise InvalidJSONException(\"...
[ "0.8003312", "0.7318135", "0.7193144", "0.7155828", "0.7070071", "0.70066273", "0.6857383", "0.6823616", "0.6789037", "0.66410047", "0.6638342", "0.65754247", "0.6503764", "0.6498993", "0.6407582", "0.6403449", "0.63990456", "0.6354787", "0.6262863", "0.62520087", "0.6243814"...
0.0
-1
Initializes a new MuJoCo environment.
def __init__(self, model_path: str, frame_skip: int, camera_settings: Optional[Dict] = None, ): self._seed() if not os.path.isfile(model_path): raise IOError( '[MujocoEnv]: Model path does not exist: {}'.format(model_path)) self.frame_skip = frame_skip self.sim_robot = MujocoSimRobot( model_path, camera_settings=camera_settings) self.sim = self.sim_robot.sim self.model = self.sim_robot.model self.data = self.sim_robot.data self.metadata = { 'render.modes': ['human', 'rgb_array', 'depth_array'], 'video.frames_per_second': int(np.round(1.0 / self.dt)) } self.mujoco_render_frames = False self.init_qpos = self.data.qpos.ravel().copy() self.init_qvel = self.data.qvel.ravel().copy() observation, _reward, done, _info = self.step(np.zeros(self.model.nu)) assert not done bounds = self.model.actuator_ctrlrange.copy() act_upper = bounds[:, 1] act_lower = bounds[:, 0] # Define the action and observation spaces. # HACK: MJRL is still using gym 0.9.x so we can't provide a dtype. try: self.action_space = spaces.Box( act_lower, act_upper, dtype=np.float32) if isinstance(observation, collections.Mapping): self.observation_space = spaces.Dict({ k: spaces.Box(-np.inf, np.inf, shape=v.shape, dtype=np.float32) for k, v in observation.items()}) else: self.obs_dim = np.sum([o.size for o in observation]) if type(observation) is tuple else observation.size self.observation_space = spaces.Box( -np.inf, np.inf, observation.shape, dtype=np.float32) except TypeError: # Fallback case for gym 0.9.x self.action_space = spaces.Box(act_lower, act_upper) assert not isinstance(observation, collections.Mapping), 'gym 0.9.x does not support dictionary observation.' self.obs_dim = np.sum([o.size for o in observation]) if type(observation) is tuple else observation.size self.observation_space = spaces.Box( -np.inf, np.inf, observation.shape)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, env):\n gym.Wrapper.__init__(self, env)", "def __init__(self, env):\n gym.Wrapper.__init__(self, env)", "def __init__(self): \n\t\n\t # get the environment\n\t\tself.env = env()", "def initialize():\n environment = Environment()\n environment.setup()", "def __init...
[ "0.65531933", "0.65531933", "0.6540199", "0.6425967", "0.6125439", "0.60250366", "0.5981867", "0.59326357", "0.58881015", "0.5858973", "0.5796893", "0.5779154", "0.5775181", "0.57644004", "0.5762341", "0.5751086", "0.5751086", "0.5721603", "0.5721603", "0.567534", "0.56694514...
0.5511559
43
Reset the robot degrees of freedom (qpos and qvel). Implement this in each subclass.
def reset_model(self): raise NotImplementedError
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reset(self):\n self._position = TwoDV(0.0, 0.0)\n self._orient = TNavigator.START_ORIENTATION[self._mode]", "def specific_reset(self) -> None:\n self.agent.specific_reset() # reset joints\n new_pos = self.agent.init_xyz\n new_pos[:2] = np.random.uniform(-0.01, 0.01, 2)\n ...
[ "0.6537879", "0.6535229", "0.64604867", "0.64147246", "0.6386366", "0.6382191", "0.6351365", "0.62435204", "0.623713", "0.6203848", "0.6164331", "0.61403173", "0.612164", "0.61177164", "0.6117006", "0.60989344", "0.6085719", "0.6066662", "0.60641676", "0.60370797", "0.6028802...
0.0
-1
Backwards compatibility with MJRL.
def mj_render(self): self.render(mode='human')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __call__(self) -> None:", "def moi(self):\n\n pass", "def __call__(self):\n raise NotImplementedError", "def __call__(self):\n\t\treturn", "def use(self):", "def support(self):", "def lro(self) -> global___Snippet.Lro:", "def regular(self):", "def __call__():", "def __call__():"...
[ "0.62065375", "0.59999824", "0.59971726", "0.5977466", "0.5968731", "0.59595406", "0.58379644", "0.5770757", "0.5765837", "0.5765837", "0.5765837", "0.5765837", "0.5765837", "0.57506377", "0.57506377", "0.570763", "0.5704474", "0.5680616", "0.5670139", "0.5652243", "0.5639114...
0.0
-1
Loads class labels at 'path'
def load_class_ck(path): fp = open(path, "r",encoding="utf-8") names = fp.read().split("\n")[:-1] return names
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_labels(self, pathLabel):\n self.pathLabel = pathLabel\n self.labelList = os.listdir(pathLabel)", "def load_label(path: str) -> dict:\n if not os.path.exists(path):\n print(f\"Warning, try to load non-exist label {path}\")\n return None\n return np.load(path, allow_pickl...
[ "0.76263887", "0.70822763", "0.6987063", "0.6917422", "0.6886614", "0.684222", "0.6831526", "0.67475176", "0.67435664", "0.6732817", "0.66995424", "0.6691952", "0.6691952", "0.6657494", "0.66031843", "0.6593739", "0.65641505", "0.649268", "0.6491007", "0.6329085", "0.6310958"...
0.6649412
14
The function reads the antenna positions (N_ant antennas) from the file given.
def from_antenna_config(filename, z, nu=None): antll = np.loadtxt(filename) Re = 6.371e6 # in m pp = np.pi/180 if not nu: nu = c2t.z_to_nu(z) # MHz antxyz = np.zeros((antll.shape[0],3)) # in m antxyz[:,0] = Re*np.cos(antll[:,1]*pp)*np.cos(antll[:,0]*pp) antxyz[:,1] = Re*np.cos(antll[:,1]*pp)*np.sin(antll[:,0]*pp) antxyz[:,2] = Re*np.sin(antll[:,1]*pp) del pp, antll N_ant = antxyz.shape[0] Nbase = np.zeros((N_ant*(N_ant-1)/2,3)) pair_comb = itertools.combinations(xrange(N_ant), 2) pair_comb = list(pair_comb) lam = c_light/(nu*1e6)/1e2 # in m for i in xrange(Nbase.shape[0]): ii,jj = pair_comb[i] ux = (antxyz[ii,0]-antxyz[jj,0])/lam uy = (antxyz[ii,1]-antxyz[jj,1])/lam uz = (antxyz[ii,2]-antxyz[jj,2])/lam Nbase[i,:] = ux,uy,uz return Nbase, N_ant
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_positions():\n return np.genfromtxt(\"POSITIONS.OUT\").transpose()", "def readInput(in_file_name):\n in_file = open(in_file_name, 'r')\n positions = []\n samples = []\n M = []; P = [];\n MC = []; PC = [];\n while True:\n line = in_file.readline()\n if not line: break\n ...
[ "0.6724647", "0.6426691", "0.6014739", "0.59488297", "0.59237134", "0.5912095", "0.5865451", "0.5829912", "0.58188653", "0.58121365", "0.58050466", "0.5768979", "0.5765536", "0.5717999", "0.5706781", "0.56984544", "0.5671126", "0.565585", "0.5651771", "0.56221503", "0.5617779...
0.56716186
16
The rotation of the earth over the observation times makes changes the part of the sky measured by each antenna.
def earth_rotation_effect(Nbase, slice_num, int_time, declination=30.): p = np.pi/180. delta = p*declination k = slice_num HA =-15.0*p*(k-1)*int_time/(3600.0) - np.pi/180.0*90.0 + np.pi/180.0*360.0 new_Nbase = np.zeros(Nbase.shape) new_Nbase[:,0] = np.sin(HA)*Nbase[:,0] + np.cos(HA)*Nbase[:,1] new_Nbase[:,1] = -1.0*np.sin(delta)*np.cos(HA)*Nbase[:,0] + np.sin(delta)*np.sin(HA)*Nbase[:,1] + np.cos(delta)*Nbase[:,2] new_Nbase[:,2] = np.cos(delta)*np.cos(HA)*Nbase[:,0] - np.cos(delta)*np.sin(HA)*Nbase[:,1] + np.sin(delta)*Nbase[:,2] return new_Nbase
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rotate_orbit(self):\n try:\n ang = self.orbit_speed * self.time_scale / self.refresh_rate\n self.obj.rotate(angle=ang, axis=vector(0, 1, 0), origin=self.star.obj.pos)\n self.sum_ang += ang\n except ZeroDivisionError:\n print(\"ERROR: REFRESH_RATE is 0\"...
[ "0.6289923", "0.62318367", "0.6067944", "0.60519284", "0.59558403", "0.59422225", "0.5937349", "0.5936226", "0.59156203", "0.5899485", "0.5844154", "0.5770054", "0.576213", "0.5754233", "0.57514757", "0.5723362", "0.5705267", "0.56774634", "0.5661177", "0.5657915", "0.5656137...
0.6019258
4
The radio telescopes observe the sky for 'total_int_time' hours each day. The signal is recorded every 'int_time' seconds.
def daily_observation(z, ncells, filename, total_int_time=4., int_time=10., boxsize=None, declination=30.): Nbase, N_ant = from_antenna_config(filename, z) uv_map0 = get_uv_coverage(Nbase, z, ncells, boxsize=boxsize) uv_map = np.zeros(uv_map0.shape) tot_num_obs = int(3600.*total_int_time/int_time) for i in xrange(tot_num_obs-1): new_Nbase = earth_rotation_effect(Nbase, i+1, int_time, declination=declination) uv_map1 = get_uv_coverage(new_Nbase, z, ncells, boxsize=boxsize) uv_map += uv_map1 print i uv_map = (uv_map+uv_map1)/tot_num_obs return uv_map, N_ant
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def set_integration_time(self, int_time: int):\n return await self.hw_device.integration_time(int_time)", "def setIntegrationTime(self,t_int):\n \n acc_len = self._adcClock*1e6*t_int/(1024.0) \n if acc_len > 65536:\n raise(\"Integration time is too long:\",t_int)...
[ "0.5954411", "0.5676104", "0.55020225", "0.52751565", "0.49494395", "0.49394286", "0.4906087", "0.4901808", "0.48571083", "0.48567733", "0.4848392", "0.48170355", "0.48030984", "0.47985277", "0.47943923", "0.4758018", "0.47353464", "0.47334984", "0.472768", "0.47261432", "0.4...
0.0
-1
It calculated the uv_map for the uvcoverage.
def get_uv_coverage(Nbase, z, ncells, boxsize=None): if not boxsize: boxsize = c2t.conv.LB uv_map = np.zeros((ncells,ncells)) theta_max = c2t.conv.LB/c2t.z_to_cdist(z) for p in xrange(Nbase.shape[0]): i,j,k = np.round(Nbase[p,0]*theta_max),np.round(Nbase[p,1]*theta_max),np.round(Nbase[p,2]*theta_max) if np.abs(i)<ncells: if np.abs(j)<ncells: uv_map[int(i),int(j)] += 1 return uv_map
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def uvmap(self, p):\n pass", "def uvmap(self, p):\n # bottom left corner of the plane\n p00 = self.position - (self.sx * self.n0) / 2 - (self.sy * self.n1) / 2\n dif_vector = p - p00\n u = np.dot(dif_vector, self.n0) / self.sx\n v = np.dot(dif_vector, self.n1) / self.sy\...
[ "0.7532845", "0.6147069", "0.59362704", "0.59166545", "0.59061474", "0.5892625", "0.55217695", "0.5503293", "0.54990137", "0.5495749", "0.54743665", "0.5441941", "0.54397714", "0.54297936", "0.5418282", "0.53442216", "0.5343465", "0.5342541", "0.5336867", "0.5311103", "0.5292...
0.6111474
2
It calculates the rms of the noise added by the interferrometers of ska.
def kanan_noise_image_ska(z, uv_map, depth_mhz, obs_time, N_ant_ska=564.): nuso = 1420.0/(1.0 + z) delnu = depth_mhz*1e3 # in kHz effective_baseline = np.sum(uv_map) T_sys_atnu300MHz= 60.0 #K T_sys = T_sys_atnu300MHz*(300.0/nuso)**2.55 ant_radius_ska = 35./2. #in m A_ant_ska = np.pi*ant_radius_ska*ant_radius_ska sigma = np.sqrt(2.0)*KB_SI*(T_sys/A_ant_ska)/np.sqrt((depth_mhz*1e6)*(obs_time*3600.0))/janskytowatt*1e3/np.sqrt(N_ant_ska*N_ant_ska/2.0) ## in mJy rms_noi = np.sqrt(2.0)*KB_SI/janskytowatt/1e3/600. *(T_sys/100.0)*(100.0/A_ant_ska)* np.sqrt(1000.0/delnu)*np.sqrt(100.0/obs_time)*1e3 sigma *= 1e3 #in muJy rms_noi *= 1e3 print 'Expected: rms in image in muJy per beam for full =', sigma print 'Effective baseline =', sigma*np.sqrt(N_ant_ska*N_ant_ska/2.0)/np.sqrt(effective_baseline), 'm' print 'Calculated: rms in the visibility =', rms_noi, 'muJy' return sigma, rms_noi
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def calculate_desired_noise_rms(clean_rms, snr):\n a = float(snr) / 20\n noise_rms = clean_rms / (10 ** a)\n return noise_rms", "def noiseReduction(self):\n pass", "def __mix_with_snr(self, sig_spk, sig_noise, need_snr):\n\n # Calc SNR\n pow_sp = np.sum((sig_spk) ** 2) / float(len...
[ "0.7203031", "0.7082178", "0.6568871", "0.6568871", "0.6404555", "0.6361032", "0.6209166", "0.6132606", "0.61150664", "0.60341376", "0.6025671", "0.60177475", "0.6004052", "0.6003546", "0.5993322", "0.5989995", "0.5975633", "0.5961862", "0.59155107", "0.59027416", "0.5901672"...
0.59576935
18
In the initialization of the blanc canvas the number of rows and columns can be defined, finally the canvas is placed into the parent frame.
def __init__(self, parent, rows=1, columns=1, set_grid_propagate=False, *args, **kwargs): super().__init__(parent, highlightthickness=0, *args, **kwargs) self.grid_propagate(set_grid_propagate) self.set_num_of_rows(rows) self.set_num_of_columns(columns) self.grid(sticky="NESW")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup_canvas(self):\n # create frame to contain canvas\n self.world_container = tk.Frame(self,\n width = self.world_size[1],\n height = self.world_size[0])\n self.world_container.grid(row = 1, column = 0, sticky ...
[ "0.7390701", "0.7325227", "0.7284718", "0.7152517", "0.7130581", "0.7130581", "0.7130581", "0.70996016", "0.6822051", "0.6757563", "0.67494226", "0.66495013", "0.66449696", "0.6612262", "0.66097903", "0.65810424", "0.6549738", "0.6528398", "0.6521471", "0.64550036", "0.644386...
0.6334319
26
Approximate the 95% confidence interval for Student's T distribution. Given the degrees of freedom, returns an approximation to the 95% confidence interval for the Student's T distribution.
def tdist95conf_level(df): df = int(round(df)) highest_table_df = len(_T_DIST_95_CONF_LEVELS) if df >= 200: return 1.960 if df >= 100: return 1.984 if df >= 80: return 1.990 if df >= 60: return 2.000 if df >= 50: return 2.009 if df >= 40: return 2.021 if df >= highest_table_df: return _T_DIST_95_CONF_LEVELS[highest_table_df - 1] return _T_DIST_95_CONF_LEVELS[df]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def confidence_at_95tpr(self):\r\n\r\n return self.confidence_at_tpr(0.95)", "def t_confidence_Interval_Difference_Of_Means(xSamples, ySamples, confidence):\n try:\n if len(xSamples) >= 30 or len(ySamples) >= 30:\n raise sampleSizeError(\"Should use normal distribution instead. m or n...
[ "0.7058479", "0.6177722", "0.60270077", "0.601931", "0.58983326", "0.58310044", "0.57847005", "0.5780554", "0.5745698", "0.5712912", "0.5690209", "0.56681216", "0.56415606", "0.561999", "0.56135404", "0.56095594", "0.55160135", "0.54769456", "0.547189", "0.5418613", "0.540857...
0.53884614
23
Find the pooled sample variance for two samples.
def pooled_sample_variance(sample1, sample2): deg_freedom = len(sample1) + len(sample2) - 2 mean1 = statistics.mean(sample1) squares1 = ((x - mean1) ** 2 for x in sample1) mean2 = statistics.mean(sample2) squares2 = ((x - mean2) ** 2 for x in sample2) return (math.fsum(squares1) + math.fsum(squares2)) / float(deg_freedom)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def variance(self):\r\n\t\t_mean = sum(self.sample)/len(self.sample)\r\n\t\treturn sum(map(lambda x: (x - _mean)**2, self.sample))/(len(self.sample) - 1)", "def variance( values, sample=False ):\n mean_val = mean_value( values )\n n_val = len( values ) -1 if sample else len( values )\n return sum( [ j**...
[ "0.69535667", "0.68496686", "0.67732096", "0.6738547", "0.67180645", "0.65582496", "0.6516183", "0.64949", "0.6486372", "0.6415831", "0.6393349", "0.639173", "0.63470465", "0.63140863", "0.6311792", "0.6310086", "0.6282926", "0.62156796", "0.62076354", "0.61959153", "0.604445...
0.80888563
0
Calculate a ttest score for the difference between two samples.
def tscore(sample1, sample2): if len(sample1) != len(sample2): raise ValueError("different number of values") error = pooled_sample_variance(sample1, sample2) / len(sample1) diff = statistics.mean(sample1) - statistics.mean(sample2) return diff / math.sqrt(error * 2)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ttest(array1, array2):\n diff = np.mean(array1) - np.mean(array2)\n if diff < c.cart_p60:\n return c.low_score\n if array1.size <= 1 or array2.size <= 1:\n return min(diff, c.single_item_cart_max)\n return 1 - ttest_ind(array1, array2, equal_var=False).pvalue\n # return diff", "d...
[ "0.75622785", "0.6732271", "0.67147094", "0.67101526", "0.66661763", "0.64419353", "0.6416758", "0.64082766", "0.63968855", "0.6324951", "0.6282402", "0.6253952", "0.6186208", "0.61227715", "0.6105249", "0.60951906", "0.5997991", "0.59836954", "0.59664416", "0.5946647", "0.59...
0.7724763
0
Determine whether two samples differ significantly. This uses a Student's twosample, twotailed ttest with alpha=0.95.
def is_significant(sample1, sample2): deg_freedom = len(sample1) + len(sample2) - 2 critical_value = tdist95conf_level(deg_freedom) t_score = tscore(sample1, sample2) return (abs(t_score) >= critical_value, t_score)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ttest_review(sample_1, sample_2, alpha=.05):\n\n result = stats.ttest_ind(sample_1, sample_2)\n crit_val, p_val = result\n \n ## Creating interpretation based on p-value results.\n\n if p_val < .05:\n print(f'The feature is statistically significant with a p-value of {p_val}.')\n\n els...
[ "0.6815539", "0.6802423", "0.67800075", "0.67102456", "0.66998667", "0.6637655", "0.66326714", "0.6597813", "0.6578872", "0.6518927", "0.64484495", "0.64479405", "0.63393015", "0.6331243", "0.61787146", "0.6156587", "0.6153929", "0.61489004", "0.61462283", "0.61424893", "0.61...
0.6690653
5
Get status summary of given transport node.
def get_aggregation_status(cls, client_obj, get_aggregation_status=None): attr_map = {'node_uuid': 'uuid', 'bfd_admin_down_count': 'admin_down_count', 'bfd_init_count': 'init_count', 'bfd_up_count': 'up_count', 'bfd_down_count': 'down_count'} node_id = client_obj.id_ # TODO(gangarm): Check if we can use a better name in product sdk for # param_1_id, which is essentially node id. client_class_obj = gettransportnodestatus.GetTransportNodeStatus( connection_object=client_obj.connection, param_1_id=node_id) status_schema_object = client_class_obj.read() status_schema_dict = status_schema_object.get_py_dict_from_object() mapped_dict = utilities.map_attributes(attr_map, status_schema_dict) result_dict = dict() result_dict['response'] = mapped_dict result_dict['response_data'] = dict() result_dict['response_data']['status_code'] = ( client_class_obj.last_calls_status_code) return result_dict
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getNodeStatus(self,node):\n data = self.connect('get','nodes/%s/status' % (node),None)\n return data", "def getNodeTaskStatusByUPID(self,node,upid):\n data = self.connect('get','nodes/%s/tasks/%s/status' % (node,upid),None)\n return data", "def status(self):\n url = API_P...
[ "0.7329152", "0.6466141", "0.6326778", "0.63179845", "0.6115346", "0.608754", "0.60856014", "0.6035681", "0.6000012", "0.59930164", "0.59930164", "0.5987499", "0.5987269", "0.5976228", "0.596927", "0.5969141", "0.5904546", "0.5899778", "0.5893478", "0.5890255", "0.58680993", ...
0.0
-1
return autsizeable field names in idfobject
def autosize_fieldname(idfobject): # undocumented stuff in this code return [ fname for (fname, dct) in zip(idfobject.objls, idfobject["objidd"]) if "autosizable" in dct ]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def field_names(self):\n ...", "def objectFields(self):\n raise NotImplementedError", "def fields(self):", "def _fields_names(cls) -> List:\n return list(field.name for field in dataclasses.fields(cls))", "def fields(self):\n ...", "def get_field_names(self):\n return {rv[0...
[ "0.7096624", "0.7052038", "0.68935066", "0.66643006", "0.6553258", "0.6494789", "0.6485936", "0.6427995", "0.64223516", "0.63584894", "0.6328418", "0.63140696", "0.62944096", "0.62799215", "0.6263371", "0.62606794", "0.6165774", "0.6162808", "0.61518234", "0.611477", "0.61031...
0.765945
0
Checks whether the given ISBN10 code is valid. >>> isISBN10('9971502100') True >>> isISBN10('9971502108') False
def isISBN10(code): # helper function for computing ISBN-10 check digit def check_digit(code): # compute check digit check = sum((i + 1) * int(code[i]) for i in range(9)) % 11 # convert check digit into its string representation return 'X' if check == 10 else str(check) # check whether given code is a string if not isinstance(code, str): return False # check whether given code contains 10 characters if len(code) != 10: return False # check whether first nine characters of given code are digits if not code[:9].isdigit(): return False # check the check digit return check_digit(code) == code[-1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def isISBN(code):\n if not (\n isinstance(code, str) and # code must be a string\n len(code) == 10 and # code must contain 10 characters\n code[:9].isdigit() # first nine characters must be digits\n ):\n return False\n\n # check the check digit\n return check...
[ "0.82796675", "0.8265154", "0.7875872", "0.76414096", "0.7556362", "0.734542", "0.73319346", "0.72045577", "0.7138764", "0.7138109", "0.704493", "0.64504737", "0.6407236", "0.6328351", "0.63012135", "0.6227752", "0.6151876", "0.60880727", "0.60821617", "0.60790056", "0.601290...
0.8820598
0
Checks whether the given ISBN13 code is valid. >>> isISBN13('9789743159664') True >>> isISBN13('9787954527409') False >>> isISBN13('8799743159665') False
def isISBN13(code): # helper function for computing ISBN-10 check digit def check_digit(code): # compute check digit check = sum((3 if i % 2 else 1) * int(code[i]) for i in range(12)) # convert check digit into a single digit return str((10 - check) % 10) # check whether given code is a string if not isinstance(code, str): return False # check whether given code contains 10 characters if len(code) != 13: return False # check whether first nine characters of given code are digits if not code[:12].isdigit(): return False # check the check digit return check_digit(code) == code[-1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_isbn_13(isbn13):\r\n isbn13 = re.sub(r'[^0-9X]', '', isbn13.replace('x', 'X'))\r\n if len(isbn13) != 13: return False\r\n if isbn13[0:3] not in ('978', '979'): return False\r\n return False if isbn_13_check_digit(isbn13[:-1]) != isbn13[-1] else True", "def isISBN(code, isbn13=True):\n\n ret...
[ "0.8294502", "0.8156061", "0.7813687", "0.7593365", "0.7539352", "0.7096975", "0.70109516", "0.6998053", "0.6760535", "0.6702344", "0.6442007", "0.64231825", "0.6403078", "0.6313602", "0.6305555", "0.6260889", "0.6107894", "0.5969835", "0.5896946", "0.57390374", "0.56742215",...
0.86895674
0
>>> isISBN('9789027439642', False) False >>> isISBN('9789027439642', True) True >>> isISBN('9789027439642') True >>> isISBN('080442957X') False >>> isISBN('080442957X', False) True
def isISBN(code, isbn13=True): return isISBN13(code) if isbn13 else isISBN10(code)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_isbn(val):\n if is_isbn10(val) or is_isbn13(val):\n if val[0:3] in [\"978\", \"979\"] or not is_ean13(val):\n return True\n return False", "def isISBN(code):\n if not (\n isinstance(code, str) and # code must be a string\n len(code) == 10 and # code must co...
[ "0.7865637", "0.7720685", "0.7428631", "0.7180673", "0.71515095", "0.7052291", "0.68789357", "0.6845327", "0.6841594", "0.6841162", "0.6837004", "0.6813746", "0.68048567", "0.675236", "0.66869426", "0.6474136", "0.64726096", "0.64508146", "0.640185", "0.6327851", "0.621259", ...
0.7591935
2
>>> codes = ['0012345678', '0012345679', '9971502100', '080442957X', 5, True, 'The Practice of Computing Using Python', '9789027439642', '5486948320146'] >>> areISBN(codes) [False, True, True, True, False, False, False, True, False] >>> areISBN(codes, True) [False, False, False, False, False, False, False, True, False] >>> areISBN(codes, False) [False, True, True, True, False, False, False, False, False]
def areISBN(codes, isbn13=None): # initialize list of checks checks = [] # construct list of checks for code in codes: if isinstance(code, str): if isbn13 is None: checks.append(isISBN(code, len(code) == 13)) else: checks.append(isISBN(code, isbn13)) else: checks.append(False) # return list of checks return checks
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_isbn(val):\n if is_isbn10(val) or is_isbn13(val):\n if val[0:3] in [\"978\", \"979\"] or not is_ean13(val):\n return True\n return False", "def isISBN(code, isbn13=True):\n\n return isISBN13(code) if isbn13 else isISBN10(code)", "def isISBN(code):\n if not (\n isinst...
[ "0.74519074", "0.72815156", "0.7090671", "0.65742284", "0.65578085", "0.6463269", "0.63166803", "0.6275632", "0.6265059", "0.62025166", "0.6028817", "0.59755796", "0.58285195", "0.5798127", "0.5781443", "0.5775947", "0.570381", "0.563113", "0.5504642", "0.54982454", "0.548243...
0.8204821
0
Calculates a one's complement integer from the given input value's bits
def ones_complement(x, bits=16): return x ^ ((1 << bits) - 1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def ones_complement(val):\n #mask = (1 << val.bit_length()) - 1\n #return int(hex(val ^ mask), 16)\n b = bin(val)\n b = b.replace('0', 'x')\n b = b.replace('1', '0')\n b = b.replace('x', '1')\n b = b.replace('1b', '0b')\n return int(b, 2)", "def complement(...
[ "0.80567867", "0.7861335", "0.75720483", "0.7485613", "0.7387961", "0.73278314", "0.7293334", "0.70973384", "0.6932238", "0.6895889", "0.68579566", "0.6638871", "0.663495", "0.66179526", "0.6587711", "0.6580346", "0.6573302", "0.6573302", "0.6572931", "0.6572931", "0.6572931"...
0.75856197
2
Calculates a two's complement integer from the given input value's bits
def twos_complement(input_value, num_bits=16): mask = 2 ** (num_bits - 1) return -(input_value & mask) + (input_value & ~mask)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def twos_complement(n, bits):\n if n < 0 or n >= 2**bits:\n raise ValueError\n\n return 2**bits - n", "def twos_complement(val, bits):\n if (val & (1 << (bits - 1))) != 0: # if sign bit is set e.g., 8bit: 128-255\n val = val - (1 << bits) # compute negative value\n return val ...
[ "0.773702", "0.7683835", "0.76752746", "0.7462103", "0.74167454", "0.7336416", "0.70802414", "0.70657045", "0.70034766", "0.6954941", "0.69006544", "0.68687546", "0.676691", "0.67314565", "0.67230797", "0.67230797", "0.67230797", "0.6720881", "0.6720881", "0.6685405", "0.6657...
0.79249084
0
Transfer models to target port
def transfer(self, target_port: Port, evaluator: Evaluator, config_uids: List[int] = None) -> None: if target_port.name not in self.transfer_defs: print(f"No transfer definition found for target port '{target_port.name}'") return # transfer definitions for specified target port tds = self.transfer_defs[target_port.name] output_dir = os.path.join(script_dir, os.pardir, "output") training_type = "transfer" print(f"TRANSFERRING MODELS TO TARGET PORT '{target_port.name}'") if config_uids is not None: print(f"Transferring configs -> {config_uids} <-") window_width = 50 num_epochs = 25 train_lr = 0.01 fine_num_epochs = 20 fine_tune_lr = 1e-5 batch_size = 1024 # skip port if fully transferred num_not_transferred = 0 for td in tds: for config in self.transfer_configs: if not self._is_transferred(target_port.name, td.base_port_name, config.uid): # print(f"Not transferred: {td.base_port_name} -> {target_port.name} ({config.uid})") num_not_transferred += 1 num_transfers = len(tds) * len(self.transfer_configs) print(f"Transferred count {num_transfers - num_not_transferred}/{num_transfers}") if num_not_transferred == 0: print(f"All transfers done for target port '{target_port.name}': Skipping") return X_ts, y_ts = load_data(target_port, window_width) baseline = mean_absolute_error(y_ts, np.full_like(y_ts, np.mean(y_ts))) evaluator.set_naive_baseline(target_port, baseline) print(f"Naive baseline: {baseline}") # X_train_orig, X_test_orig, y_train_orig, y_test_orig = train_test_split(X_ts, y_ts, test_size=0.2, # random_state=42, shuffle=False) # train_optimizer = Adam(learning_rate=train_lr) # fine_tune_optimizer = Adam(learning_rate=fine_tune_lr) for td in tds: print(f".:'`!`':. 
TRANSFERRING PORT {td.base_port_name} TO {td.target_port_name} .:'`!`':.") print(f"- - Epochs {num_epochs} </> </> Learning rate {train_lr} - -") print(f"- - Window width {window_width} </> Batch size {batch_size} - -") # print(f"- - Number of model's parameters {num_total_trainable_parameters(model)} device {device} - -") base_port = self.pm.find_port(td.base_port_name) if base_port is None: raise ValueError(f"Unable to associate port with port name '{td.base_port_name}'") # model = inception_time(input_shape=(window_width, 37)) # print(model.summary()) # apply transfer config for config in self.transfer_configs: if config_uids is not None and config.uid not in config_uids: continue if self._is_transferred(target_port.name, td.base_port_name, config.uid): print(f"Skipping config {config.uid}") continue print(f"\n.:'':. APPLYING CONFIG {config.uid} ::'':.") print(f"-> -> {config.desc} <- <-") print(f"-> -> nth_subset: {config.nth_subset} <- <-") print(f"-> -> trainable layers: {config.train_layers} <- <-") _, _, start_time, _, _ = decode_keras_model(os.path.split(td.base_model_path)[1]) model_file_name = encode_keras_model(td.target_port_name, start_time, td.base_port_name, config.uid) file_path = os.path.join(output_dir, "model", td.target_port_name, model_file_name) X_train_orig, X_test_orig, y_train_orig, y_test_orig = train_test_split(X_ts, y_ts, test_size=0.2, random_state=42, shuffle=False) train_optimizer = Adam(learning_rate=train_lr) fine_tune_optimizer = Adam(learning_rate=fine_tune_lr) checkpoint = ModelCheckpoint(file_path, monitor='val_mae', mode='min', verbose=2, save_best_only=True) early = EarlyStopping(monitor="val_mae", mode="min", patience=10, verbose=2) redonplat = ReduceLROnPlateau(monitor="val_mae", mode="min", patience=3, verbose=2) callbacks_list = [checkpoint, early, redonplat] # optimizer = Adam(learning_rate=lr) # # # configure model # model.compile(optimizer=optimizer, loss="mse", metrics=["mae"]) # load base model model = 
load_model(td.base_model_path) # if config.uid == 0: # print(model.summary()) # else: # print(model.summary()) # del model X_train = X_train_orig X_test = X_test_orig y_train = y_train_orig y_test = y_test_orig # apply transfer configuration if config.nth_subset > 1: if X_train.shape[0] < config.nth_subset: print(f"Unable to apply nth-subset. Not enough data") X_train = X_train_orig[0::config.nth_subset] X_test = X_test_orig[0::config.nth_subset] y_train = y_train_orig[0::config.nth_subset] y_test = y_test_orig[0::config.nth_subset] print(f"Orig shape: {X_train_orig.shape} {config.nth_subset} th-subset shape: {X_train.shape}") print(f"Orig shape: {X_test_orig.shape} {config.nth_subset} th-subset shape: {X_test.shape}") print(f"Orig shape: {y_train_orig.shape} {config.nth_subset} th-subset shape: {y_train.shape}") print(f"Orig shape: {y_test_orig.shape} {config.nth_subset} th-subset shape: {y_test.shape}") modified = False # freeze certain layers for layer in model.layers: if layer.name not in config.train_layers: modified = True print(f"setting layer {layer.name} to False") layer.trainable = False else: print(f"layer {layer.name} stays True") if modified: print(f"modified. 
compiling") # re-compile model.compile(optimizer=train_optimizer, loss="mse", metrics=["mae"]) # trainable_count = int(np.sum([K.count_params(p) for p in set(model.trainable_weights)])) # non_trainable_count = int(np.sum([K.count_params(p) for p in set(model.non_trainable_weights)])) trainable_count = count_params(model.trainable_weights) non_trainable_count = count_params(model.non_trainable_weights) print(f"Total params: {trainable_count + non_trainable_count}") print(f"Trainable params: {trainable_count}") print(f"Non trainable params: {non_trainable_count}") # transfer model result = model.fit(X_train, y_train, epochs=num_epochs, batch_size=batch_size, verbose=2, validation_data=(X_test, y_test), callbacks=callbacks_list) train_mae = result.history["mae"] val_mae = result.history["val_mae"] gc.collect() tune_result = None tune_train_mae = None tune_val_mae = None if config.tune: print(f"Fine-Tuning transferred model") # apply fine-tuning: unfreeze all but batch-normalization layers! 
for layer in model.layers: if not layer.name.startswith("batch_normalization"): layer.trainable = True model.compile(optimizer=fine_tune_optimizer, loss="mse", metrics=["mae"]) # print(f"model for fine tuning") # print(model.summary()) tune_result = model.fit(X_train, y_train, epochs=fine_num_epochs, batch_size=batch_size, verbose=2, validation_data=(X_test, y_test), callbacks=callbacks_list) tune_train_mae = tune_result.history["mae"] tune_val_mae = tune_result.history["val_mae"] model.load_weights(file_path) # set evaluation def _compute_mae(_val_mae: List[float], _tune_val_mae: List[float]) -> float: if _tune_val_mae is not None: _val_mae = _val_mae + _tune_val_mae return min(val_mae) evaluator.set_mae(target_port, start_time, _compute_mae(val_mae, tune_val_mae), base_port, config.uid) y_pred = model.predict(X_test) grouped_mae = evaluator.group_mae(y_test, y_pred) evaluator.set_mae(target_port, start_time, grouped_mae, base_port, config.uid) # save history history_file_name = encode_history_file(training_type, target_port.name, start_time, td.base_port_name, config.uid) history_path = os.path.join(output_dir, "data", target_port.name, history_file_name) np.save(history_path, [result.history, tune_result.history if tune_result else None]) # plot history plot_dir = os.path.join(output_dir, "plot") plot_history(train_mae, val_mae, plot_dir, target_port.name, start_time, training_type, td.base_port_name, config.uid, tune_train_mae, tune_val_mae) # evaluator.plot_grouped_mae(target_port, training_type, start_time, config.uid) plot_predictions(y_pred, y_test, plot_dir, target_port.name, start_time, training_type, td.base_port_name, config.uid) self.set_transfer(target_port.name, td.base_port_name, config.uid) del checkpoint, early, redonplat del X_train_orig, X_test_orig, y_train_orig, y_test_orig, model, X_train, y_train, X_test, y_test gc.collect() tf.keras.backend.clear_session() gc.collect() del X_ts, y_ts
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def deploy_to_device(self):\n if self.device_ids is not None and len(self.device_ids) > 1:\n if not isinstance(self.model, torch.nn.DataParallel):\n self.model = torch.nn.DataParallel(self.model, self.device_ids)\n\n self.model = self.model.to(self.device)\n self.crit...
[ "0.60630095", "0.60588175", "0.57406735", "0.56002295", "0.551898", "0.55181533", "0.5453672", "0.5411719", "0.5400127", "0.53960764", "0.538731", "0.53615785", "0.53615785", "0.5356729", "0.53435755", "0.5335418", "0.5325586", "0.5305902", "0.52706647", "0.52651966", "0.5254...
0.6467971
0
Generate TransferDefinitions based on transferconfig.json, containing those ports that have a base training for transferring to another port
def _generate_transfers(self) -> Dict[str, List[TransferDefinition]]: config = read_json(self.config_path) transfer_defs = {} ports = list(config["ports"]) permutations = list(itertools.permutations(ports, r=2)) # for pair in _permute(config["ports"]): for pair in permutations: base_port, target_port = self.pm.find_port(pair[0]), self.pm.find_port(pair[1]) if target_port is None: raise ValueError(f"No port found: Unable to transfer from base-port with name '{base_port.name}'") if target_port is None: raise ValueError(f"No port found: Unable to transfer to target-port with name '{pair[1]}'") trainings = self.pm.load_trainings(base_port, self.output_dir, self.routes_dir, training_type="base") # print(f"loaded trainings. base port {base_port.name}:\n{trainings.keys()}") if len(trainings.keys()) < 1: print(f"No base-training found for port '{base_port.name}'. Skipping") continue training = list(trainings.values())[-1][0] # print(f"training ({len(trainings.values())}): {training}") # print(f"Pair {base_port.name} ({len(trainings)} base-trains) -> {target_port.name}. " # f"Using latest at '{training.start_time}'") verify_output_dir(self.output_dir, target_port.name) td = TransferDefinition(base_port_name=base_port.name, base_model_path=training.model_path, target_port_name=target_port.name, target_routes_dir=os.path.join(self.routes_dir, target_port.name), target_model_dir=os.path.join(self.output_dir, "model", target_port.name), target_output_data_dir=os.path.join(self.output_dir, "data", target_port.name), target_plot_dir=os.path.join(self.output_dir, "plot", target_port.name), target_log_dir=os.path.join(self.output_dir, "log", target_port.name)) name = target_port.name if name in transfer_defs: transfer_defs[target_port.name].append(td) else: transfer_defs[target_port.name] = [td] return transfer_defs
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def transfer(self, target_port: Port, evaluator: Evaluator, config_uids: List[int] = None) -> None:\n if target_port.name not in self.transfer_defs:\n print(f\"No transfer definition found for target port '{target_port.name}'\")\n return\n # transfer definitions for specified ta...
[ "0.6021851", "0.5251869", "0.51448715", "0.5136205", "0.5126871", "0.5095791", "0.50636524", "0.49946162", "0.49695787", "0.49350056", "0.49320048", "0.49314785", "0.4922587", "0.48967493", "0.48889312", "0.4869051", "0.48633268", "0.4862456", "0.4857697", "0.484664", "0.4846...
0.8338399
0
Initialization of each point
def __init__(self, p0: Point, p1: Point, c0: Point = None, c1: Point = None) -> None: self.p0 = p0 self.p1 = p1 self.c0 = c0 if c0 is not None else p0 self.c1 = c1 if c1 is not None else p1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, points):\n self.points = points\n self.init()", "def __init__(self, pts=[]):\n self.set_points(pts)", "def __init__(self, points):\n\t\tself.points = points", "def __init__(self, num_points=5000):\n self.num_points = num_points\n\n # All walks start at (0...
[ "0.78019327", "0.74978656", "0.74227774", "0.71523833", "0.71523833", "0.71206594", "0.7075273", "0.70572877", "0.70357925", "0.6961247", "0.69041634", "0.68821687", "0.68821687", "0.68765813", "0.6853304", "0.6852539", "0.6852539", "0.68471855", "0.6817271", "0.6817271", "0....
0.65025926
41
Alternate initializer for compact input of coordinates
def from_floats(cls, *floats): curves = [] for i in range(0, len(floats) - 2, 6): p1 = (floats[i], floats[i + 1]) c1 = (floats[i + 2], floats[i + 3]) c2 = (floats[i + 4], floats[i + 5]) p2 = (floats[i + 6], floats[i + 7]) curves.append(CubicBezierCurve(p1, p2, c1, c2)) return cls(curves)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self, lon: float = 0, lat: float = 0):\n super(Point2D, self).__init__(lon, lat)", "def __init__(self, d):\n\t\tself._coords = [0] * d", "def __init__(self, d):\n self._coords = [0]*d", "def __init__(self, coordinates):\n\n if len(coordinates) != 5:\n raise ValueE...
[ "0.63444114", "0.63369143", "0.6208486", "0.6187477", "0.61472523", "0.61472523", "0.614598", "0.6136781", "0.61134976", "0.6090093", "0.607837", "0.6063738", "0.60410446", "0.6038614", "0.6038257", "0.6028245", "0.60152465", "0.60152465", "0.60152465", "0.5991858", "0.597739...
0.0
-1
Compares each curve with the next to verify continuity. Note that this function treats curves as directed, thus two curves that start at the same point will return `False` when compared.
def assert_continuous(*curves: CubicBezierCurve) -> bool: if not curves: raise ValueError("CurveChecker.assert_continuous() cannot be called on an empty list") previous_curve = curves[0] for curve in curves[1:]: if previous_curve.p1 != curve.p0: return False previous_curve = curve return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def assert_differentiable(*curves: CubicBezierCurve) -> bool:\n if not curves:\n raise ValueError(\"CurveChecker.assert_differentiable() cannot be called on an empty list\")\n\n if not assert_continuous(*curves):\n return False\n\n for curve0, curve1 in zip(curves, curves[1:]):\n if n...
[ "0.6867624", "0.65761954", "0.60859615", "0.60859615", "0.6069512", "0.5920149", "0.5899444", "0.58113146", "0.5743151", "0.5739253", "0.5737969", "0.57277334", "0.5719145", "0.570918", "0.5702754", "0.5661689", "0.5660275", "0.5631335", "0.5627203", "0.5584733", "0.55798703"...
0.67138547
1
Verifies that the adjacent slopes between points are within specified tolerance of one another. Note that assert_collinear assumes ordered points; three actually collinear points passed with the middle point as the first or last argument will return `False`
def assert_collinear(*points: Point, tolerance: float = 1e-2) -> bool: if len(points) < 3: raise ValueError("CurveChecker.assert_collinear() must be called with at least three points") thetas = [np.arctan2(p0[1] - p1[1], p0[0] - p1[0]) for p0, p1 in zip(points, points[1:])] for t0, t1 in zip(thetas, thetas[1:]): if abs(t0 - t1) > tolerance: return False return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def isCollinear(a,b,c):\r\n #return slope(a, b) == slope(b, c) == slope(c, a) #DOES NOT WORK\r\n #return (b[0] - a[0]) * (c[1] - a[1]) == (c[0] - a[0]) * (b[1] - a[1]) \r\n #return distance(a,b) + distance(b,c) == distance(a,c)\r\n x1 = a[0]\r\n y1 = a[1]\r\n x2 = b[0]\r\n y2 = b[1]\r\n x3 ...
[ "0.70717025", "0.6318565", "0.6101531", "0.60696363", "0.605181", "0.59607244", "0.5879398", "0.5877427", "0.5806856", "0.57670236", "0.5622218", "0.5608094", "0.5563991", "0.55154955", "0.5489862", "0.54876494", "0.54695606", "0.5420858", "0.5409832", "0.5381098", "0.5375114...
0.80137265
0
Verifies differentiability of curves by checking collinearity of adjacent curves' control points
def assert_differentiable(*curves: CubicBezierCurve) -> bool: if not curves: raise ValueError("CurveChecker.assert_differentiable() cannot be called on an empty list") if not assert_continuous(*curves): return False for curve0, curve1 in zip(curves, curves[1:]): if not assert_collinear(curve0.c1, curve1.p0, curve1.c0): return False return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def isCollinear(a,b,c):\r\n #return slope(a, b) == slope(b, c) == slope(c, a) #DOES NOT WORK\r\n #return (b[0] - a[0]) * (c[1] - a[1]) == (c[0] - a[0]) * (b[1] - a[1]) \r\n #return distance(a,b) + distance(b,c) == distance(a,c)\r\n x1 = a[0]\r\n y1 = a[1]\r\n x2 = b[0]\r\n y2 = b[1]\r\n x3 ...
[ "0.6670978", "0.6513759", "0.6198429", "0.6138979", "0.61240935", "0.61158717", "0.60906714", "0.6050059", "0.6006302", "0.59238374", "0.5918185", "0.59094197", "0.58958864", "0.5882234", "0.5876733", "0.5855049", "0.579847", "0.579847", "0.577797", "0.57669806", "0.5764593",...
0.68860763
0
Converts a path to a string representation for inclusion in an SVG file as
def path_to_string(path: Path) -> str: assert_continuous(path) pieces = ["M {} {}".format(path[0].p0[0], path[0].p0[1])] for curve in iter(path): # iter cast not strictly necessary piece = "C {} {} {} {} {} {}".format( int(round(curve.c0[0])), int(round(curve.c0[1])), int(round(curve.c1[0])), int(round(curve.c1[1])), int(round(curve.p1[0])), int(round(curve.p1[1])) ) pieces.append(piece) return " ".join(pieces)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def as_string(path: pathlib.Path) -> str:\n return path.as_posix()", "def path_to_str(path):\n if hasattr(path, '__fspath__'):\n path = as_str_any(path.__fspath__())\n return path", "def save_svg(string, file_name):\n file_handle = file(file_name, \"w\")\n file_handle.write(string)\n ...
[ "0.664361", "0.6505727", "0.6453225", "0.6349524", "0.631118", "0.62037516", "0.6048819", "0.6006023", "0.5888196", "0.5883319", "0.5875403", "0.5838811", "0.578169", "0.57720757", "0.57636064", "0.5753614", "0.5715025", "0.57089794", "0.569792", "0.5695481", "0.56592214", ...
0.6743669
0
Tests that the processor can be initialized.
def setUp(self): self.logger = mock.MagicMock() test_state = state.DFTimewolfState(config.Config) self.turbinia_processor = turbinia_base.TurbiniaProcessorBase( test_state, self.logger) file_path = os.path.join( CURRENT_DIR, "test_data", "turbinia_request_status.json") self._request_status = json.load(open(file_path))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _should_initialize_check_run(self, payload):\n action = payload.get('action')\n return action in self.initialize_actions or self.initialize_actions is None", "def test_init_success(self):\n found = False\n try:\n pyint = Interpreter()\n except InitializationExcep...
[ "0.6729422", "0.65866035", "0.6567162", "0.65516055", "0.63776916", "0.63715816", "0.63663733", "0.626371", "0.624758", "0.62021387", "0.6187901", "0.6163606", "0.6123539", "0.61059314", "0.60137165", "0.5975385", "0.5962163", "0.59609634", "0.5944189", "0.5939157", "0.593644...
0.0
-1
Tests the TurbiniaSetup method.
def testTurbiniaSetup(self, _mock_read_config): _mock_read_config.return_value = {"OUTPUT_DIR": "/tmp"} self.turbinia_processor.TurbiniaSetUp( project="turbinia-project", turbinia_auth=False, turbinia_recipe=None, turbinia_zone="us-central1f", turbinia_api="http://localhost:8001", incident_id="123456789", sketch_id="12345", ) self.assertEqual(self.turbinia_processor.project, "turbinia-project") self.assertEqual(self.turbinia_processor.turbinia_zone, "us-central1f") self.assertEqual( self.turbinia_processor.turbinia_api, "http://localhost:8001") self.assertEqual(self.turbinia_processor.incident_id, "123456789") self.assertEqual(self.turbinia_processor.sketch_id, "12345") self.assertEqual(self.turbinia_processor.output_path, "/tmp") self.assertEqual(self.turbinia_processor.turbinia_recipe, None)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def setup():\n pass", "def TurbiniaSetUp(\n self, project: str, turbinia_auth: bool,\n turbinia_recipe: Union[str, None], turbinia_zone: str, turbinia_api: str,\n incident_id: str, sketch_id: int) -> None:\n self.project = project\n self.turbinia_auth = turbinia_auth\n self.turbinia_ap...
[ "0.6720269", "0.65703833", "0.6467602", "0.6449046", "0.6447386", "0.64372605", "0.64077723", "0.6403362", "0.63865745", "0.635866", "0.6345223", "0.63323843", "0.63103575", "0.6282889", "0.6277148", "0.62679124", "0.62679124", "0.62679124", "0.62679124", "0.62679124", "0.626...
0.77893466
0
Tests the TurbiniaStart method.
def testTurbiniaStart(self, mock_create_request): mock_create_request.return_value = { "request_id": "41483253079448e59685d88f37ab91f7" } mock_api_instance = mock.MagicMock() mock_api_instance.create_request = mock_create_request self.turbinia_processor.requests_api_instance = mock_api_instance evidence = { "type": "GoogleCloudDisk", "disk_name": "disk-1", "project": "project-1", "zone": "us-central1-f", } request_id = self.turbinia_processor.TurbiniaStart( evidence=evidence, yara_rules=YARA_RULE) self.assertEqual(request_id, "41483253079448e59685d88f37ab91f7")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testStart(self):\n self.machine.stop()\n self.machine.start(safe.Settling)\n \n self.assertCurrentMotion(motion.common.Hover)\n \n self.releaseTimer(safe.Settling.SETTLED)\n self.assertCurrentState(safe.Grabbing)", "def startTestRun(self):", "def startTest(a...
[ "0.71255654", "0.6825471", "0.6790718", "0.66931415", "0.6602822", "0.6602822", "0.6602822", "0.6602822", "0.6589439", "0.6544202", "0.65038764", "0.650213", "0.63993055", "0.63993055", "0.6367139", "0.6244631", "0.6237109", "0.6224533", "0.61932135", "0.61699873", "0.6169987...
0.63942826
14
Tests the TurbiniaWait method.
def testTurbiniaWait(self, mock_get_request_status, _): mock_api_instance = mock.MagicMock() mock_api_instance.create_request = mock_get_request_status self.turbinia_processor.requests_api_instance = mock_api_instance mock_get_request_status.return_value = self._request_status for task, path in self.turbinia_processor.TurbiniaWait(TASK_ID): # Check that the task and path are correct for a PlasoParserTask if task["id"] == TASK_ID: self.assertEqual(task, self._request_status["tasks"][0]) self.assertEqual(path, TEST_TASK_PATH) break
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def wait():\n pass", "def wait():\n time.sleep(1)", "def do_wait(self):\n pass", "def wait(wait_time=WAIT_TIME):\n # time.sleep(wait_time)\n pass", "def wait(self):\n pass", "def wait(self):\n pass", "def wait(self):\n time.sleep(0.010)", "def wait(cls, quad):\n\t\...
[ "0.761487", "0.74810785", "0.7287967", "0.6939687", "0.68942183", "0.68942183", "0.6890984", "0.6889294", "0.6812686", "0.6812686", "0.6812686", "0.6812686", "0.6812686", "0.6812686", "0.6812686", "0.6812686", "0.6812686", "0.6812686", "0.6796886", "0.6759803", "0.67246556", ...
0.6903799
4
Tests the _isInterestingPath method.
def testIsInterestingPath(self): # pylint: disable=protected-access self.assertTrue(self.turbinia_processor._isInterestingPath(TEST_TASK_PATH))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _isInterestingPath(self, path: str) -> bool:\n for suffix in self.extensions:\n if path.endswith(suffix):\n return True\n return False", "def test_find_path_bi():\n assert True", "def is_path(self, s):\n return True", "def _is_interesting_op(self, op):\n return op_priority(...
[ "0.72631514", "0.58760685", "0.58147526", "0.56765735", "0.55693245", "0.55457276", "0.54889286", "0.5446209", "0.53737456", "0.53603786", "0.53477657", "0.5298553", "0.5297065", "0.5293575", "0.5228027", "0.5226015", "0.519997", "0.51928556", "0.5165649", "0.51393193", "0.51...
0.81139785
0
Tests the _ExtractFiles method.
def testExtractPath(self, mock_tempdir): mock_tempdir.return_value = '/tmp' file_path = os.path.join( CURRENT_DIR, "test_data", "c4e9abd577db475484b2ded34a011b96.tgz") expected_local_path = f"/tmp{TEST_TASK_PATH}" # pylint: disable=protected-access local_path = self.turbinia_processor._ExtractFiles( file_path, TEST_TASK_PATH) self.assertEqual(local_path, expected_local_path)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract_files(self) -> list:\n pass", "def extract(self):\n self.build_path_pairs()\n self.extract_field_blocks()\n self.assert_filenames()", "def test_zip_files(self):\n base_zip_files = ['whypython.txt', 'states.dbf', 'cities.kmz']\n\n text_file = os.path.join(os...
[ "0.6961958", "0.67344284", "0.65078896", "0.64383835", "0.6424595", "0.6330281", "0.6321087", "0.63068205", "0.62984717", "0.62395144", "0.62120587", "0.61898607", "0.6165155", "0.61557406", "0.6154367", "0.6150874", "0.6107343", "0.61028737", "0.60906625", "0.60642004", "0.6...
0.64720875
3
Tests the RefreshClientCredentials method.
def testRefreshClientCredentials(self, mock_get_credentials, mock_initialize_client): # Set an expired token. self.turbinia_processor.credentials = mock.MagicMock( expiry = FAKE_CREDENTIALS['expiry'], expired = True) self.turbinia_processor.RefreshClientCredentials() mock_get_credentials.assert_called_once() mock_initialize_client.assert_called_once()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def RefreshClientCredentials(self) -> bool:\n refresh = False\n if self.credentials and self.credentials.expired:\n self.credentials = self.GetCredentials(\n self.credentials_path, self.client_secrets_path)\n self.client = self.InitializeTurbiniaApiClient(self.credentials)\n refresh =...
[ "0.7480343", "0.6987335", "0.6519567", "0.647827", "0.64674634", "0.6362675", "0.6351274", "0.6320688", "0.6314245", "0.6286668", "0.6279366", "0.6243212", "0.6241278", "0.6184748", "0.6173102", "0.61290675", "0.6063998", "0.6048179", "0.60427195", "0.6041711", "0.6039306", ...
0.791131
0
Tests the InitializeTurbiniaApiClient method.
def testInitializeTurbiniaApiClientNoCreds(self, mock_get_credentials): self.turbinia_processor.turbinia_api = 'http://127.0.0.1:8000' self.turbinia_processor.turbinia_auth = True mock_credentials = mock.MagicMock(spec=Credentials, id_token = FAKE_CREDENTIALS['token']) mock_credentials.id_token = mock.MagicMock() mock_credentials.id_token.return_value = FAKE_CREDENTIALS['token'] self.turbinia_processor.credentials = mock_credentials mock_get_credentials.return_value = mock_credentials result = self.turbinia_processor.InitializeTurbiniaApiClient(None) mock_get_credentials.assert_called_once() self.assertIsInstance(result, turbinia_api_lib.ApiClient)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testInitializeTurbiniaApiClient(self, mock_get_credentials):\n self.turbinia_processor.turbinia_api = 'http://127.0.0.1:8000'\n self.turbinia_processor.turbinia_auth = True\n mock_credentials = mock.MagicMock(spec=Credentials, id_token = FAKE_CREDENTIALS['token'])\n mock_credentials.id_token = mock...
[ "0.763972", "0.71817064", "0.69348216", "0.6902786", "0.6898086", "0.68290806", "0.68290806", "0.6813784", "0.6806354", "0.67816716", "0.6775598", "0.6775598", "0.6775598", "0.6712668", "0.6631375", "0.65852684", "0.65815634", "0.6529811", "0.6472687", "0.6472687", "0.6472687...
0.73480415
1
Tests the InitializeTurbiniaApiClient method.
def testInitializeTurbiniaApiClient(self, mock_get_credentials): self.turbinia_processor.turbinia_api = 'http://127.0.0.1:8000' self.turbinia_processor.turbinia_auth = True mock_credentials = mock.MagicMock(spec=Credentials, id_token = FAKE_CREDENTIALS['token']) mock_credentials.id_token = mock.MagicMock() mock_credentials.id_token.return_value = FAKE_CREDENTIALS['token'] self.turbinia_processor.credentials = mock_credentials mock_get_credentials.return_value = mock_credentials result = self.turbinia_processor.InitializeTurbiniaApiClient(mock_credentials) mock_get_credentials.assert_not_called() self.assertIsInstance(result, turbinia_api_lib.ApiClient)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def testInitializeTurbiniaApiClientNoCreds(self, mock_get_credentials):\n self.turbinia_processor.turbinia_api = 'http://127.0.0.1:8000'\n self.turbinia_processor.turbinia_auth = True\n mock_credentials = mock.MagicMock(spec=Credentials, id_token = FAKE_CREDENTIALS['token'])\n mock_credentials.id_token...
[ "0.73480415", "0.71817064", "0.69348216", "0.6902786", "0.6898086", "0.68290806", "0.68290806", "0.6813784", "0.6806354", "0.67816716", "0.6775598", "0.6775598", "0.6775598", "0.6712668", "0.6631375", "0.65852684", "0.65815634", "0.6529811", "0.6472687", "0.6472687", "0.64726...
0.763972
0
Test whether a section exists.
def has_section(self, section): return self.cfg.has_section(section)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_section_exist(self, section_name: str) -> bool:\n pass", "def has_section(self, section):\n raise NotImplementedError()", "def has_section(self, section):\n if section in self._dict:\n return True\n return False", "def has_section(self, section):\n\n return se...
[ "0.8818048", "0.8469707", "0.8389857", "0.8208384", "0.8178421", "0.8061962", "0.7974788", "0.7825325", "0.752125", "0.7518481", "0.7350608", "0.7341947", "0.7030508", "0.6875858", "0.6844375", "0.6687593", "0.6592266", "0.6513766", "0.64952224", "0.645685", "0.6444705", "0...
0.82564044
3
Test whether an option exists.
def has_option(self, option, section = None): if section is None: section = self.default_section return self.cfg.has_option(section, option)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def has_option(self, key):\n\n return key in self.__opt", "def has_option(self, name):\n return self.option_settings[name]", "def has_option(self, option):\n\t\treturn self.config_parser.has_option(self.section_name, option)", "def has_option(self, option):\n splitvals = option.split('/'...
[ "0.8260075", "0.81346714", "0.79610306", "0.7878082", "0.78457946", "0.7843575", "0.78201646", "0.78074056", "0.77991354", "0.7590212", "0.75709635", "0.7506453", "0.74686354", "0.73545367", "0.73482585", "0.72637665", "0.723397", "0.69554585", "0.68729013", "0.6776543", "0.6...
0.7794908
9
Parse OpenSSLstyle foo.0, foo.1, ... subscripted options. Returns a list of values matching the specified option name.
def multiget(self, option, section = None): matches = [] if section is None: section = self.default_section if self.cfg.has_option(section, option): matches.append((-1, self.get(option, section = section))) for key, value in self.cfg.items(section): s = key.rsplit(".", 1) if len(s) == 2 and s[0] == option and s[1].isdigit(): matches.append((int(s[1]), self.get(option, section = section))) matches.sort() return [match[1] for match in matches]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_option(self, name):\r\n if not isinstance(name, str):\r\n name = \" \".join(name)\r\n lines = self.sendAndRecv(\"GETCONF %s\\r\\n\" % name)\r\n\r\n r = []\r\n for _,line,_ in lines:\r\n try:\r\n key, val = line.split(\"=\", 1)\r\n r.append((key,val))\r\n except Valu...
[ "0.5681802", "0.5620391", "0.5565046", "0.541359", "0.53751975", "0.5340231", "0.5283279", "0.5276728", "0.52554685", "0.5251639", "0.51984483", "0.5196909", "0.519201", "0.5174309", "0.5122386", "0.51024044", "0.51020473", "0.5095427", "0.5034037", "0.5030856", "0.50224656",...
0.5834238
0
Replacement function for indirect variable substitution. This is intended for use with re.subn().
def _repl(self, m): section, option = m.group(1, 2) if section == "ENV": return os.getenv(option, "") else: return self.cfg.get(section, option)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_variable_substitution(item):\n if isinstance(item, str):\n try:\n item = re_keyref.sub(getdata, item)\n except KeyError, err:\n print >> sys.stderr, (\n \"Use of undefined key in variable substitution: %s\"\n % ...
[ "0.6543323", "0.64928055", "0.64095277", "0.640149", "0.6348403", "0.6247242", "0.61828613", "0.6170363", "0.60472184", "0.6013036", "0.6008018", "0.59389454", "0.5931353", "0.58995754", "0.5893741", "0.58664304", "0.58525014", "0.58441067", "0.5818302", "0.5775025", "0.57669...
0.0
-1
Get an option, perhaps with a default value.
def get(self, option, default = None, section = None): if section is None: section = self.default_section if default is not None and not self.cfg.has_option(section, option): return default val = self.cfg.get(section, option) while True: val, modified = self._regexp.subn(self._repl, val, 1) if not modified: return val
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_option(self, name, datatype, default):\n return config.get_option(self._options,\n name,\n type=datatype,\n default=default)", "def __get_option(self, option):\n if option in Config.OPTIONS.keys...
[ "0.8309457", "0.8285595", "0.8050056", "0.78617734", "0.78480136", "0.7729407", "0.76687425", "0.7613236", "0.76116955", "0.7508143", "0.7377448", "0.7367811", "0.7332215", "0.7305065", "0.7281587", "0.7236693", "0.72000957", "0.7122571", "0.71041566", "0.7097126", "0.7038417...
0.6900306
25
Get a boolean option, perhaps with a default value.
def getboolean(self, option, default = None, section = None): v = self.get(option, default, section) if isinstance(v, str): v = v.lower() if v not in self.cfg._boolean_states: raise ValueError, "Not a boolean: %s" % v v = self.cfg._boolean_states[v] return v
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getboolean(self, option, default=None):\n\t\treturn self._get_raw(option, 'boolean', default)", "def get_bool(options, name, default=False):\n value = options.get(name)\n if not value:\n return default\n if value.lower() == 'true':\n return True\n elif value.lower() == 'false':\n ...
[ "0.8732398", "0.86066973", "0.84299433", "0.8366191", "0.82892025", "0.8260845", "0.822819", "0.8115869", "0.78542686", "0.7821285", "0.78019625", "0.7718613", "0.7465177", "0.7321887", "0.73198515", "0.7313598", "0.7216047", "0.7181007", "0.7140331", "0.71299386", "0.7106636...
0.8165139
7
Get an integer option, perhaps with a default value.
def getint(self, option, default = None, section = None): return int(self.get(option, default, section))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getint(self, option, default=None):\n\t\treturn self._get_raw(option, 'int', default)", "def getint(self, option):\n return getint(self.name, option)", "def getint(self, section, option, default=None):\r\n return self.get(section, option, type=int, default=default)", "def getint(self, option, a...
[ "0.84793097", "0.81527907", "0.79768705", "0.7929183", "0.7760084", "0.7490237", "0.73363376", "0.71646047", "0.7082002", "0.7010749", "0.6840518", "0.66669184", "0.65775895", "0.6542784", "0.65416443", "0.652801", "0.63949615", "0.6394363", "0.63876903", "0.633896", "0.63153...
0.8415297
1
Get a long integer option, perhaps with a default value.
def getlong(self, option, default = None, section = None): return long(self.get(option, default, section))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getint(self, option, default = None, section = None):\n return int(self.get(option, default, section))", "def getLong(self, int: int, int2: int) -> int:\n ...", "def getLong(t, swipl):\n i = c_long()\n if swipl.PL_get_long(t, byref(i)):\n return i.value\n else:\n raise Inva...
[ "0.70192164", "0.68363434", "0.6702272", "0.66861874", "0.6630546", "0.66111416", "0.6431476", "0.6385897", "0.6310543", "0.628531", "0.6267423", "0.62353796", "0.6210837", "0.61966425", "0.6178884", "0.6079542", "0.60778195", "0.60364276", "0.59944767", "0.59922874", "0.5992...
0.8568428
0
Consolidated control for all the little global control flags scattered through the libraries. This isn't a particularly good place for this function to live, but it has to live somewhere and making it a method of the config parser from which it gets all of its data is less silly than the available alternatives.
def set_global_flags(self): import rpki.http, rpki.x509, rpki.sql, rpki.async, rpki.log try: rpki.http.debug_http = self.getboolean("debug_http") except ConfigParser.NoOptionError: pass try: rpki.http.want_persistent_client = self.getboolean("want_persistent_client") except ConfigParser.NoOptionError: pass try: rpki.http.want_persistent_server = self.getboolean("want_persistent_server") except ConfigParser.NoOptionError: pass try: rpki.http.use_adns = self.getboolean("use_adns") except ConfigParser.NoOptionError: pass try: rpki.http.enable_ipv6_clients = self.getboolean("enable_ipv6_clients") except ConfigParser.NoOptionError: pass try: rpki.http.enable_ipv6_servers = self.getboolean("enable_ipv6_servers") except ConfigParser.NoOptionError: pass try: rpki.x509.CMS_object.debug_cms_certs = self.getboolean("debug_cms_certs") except ConfigParser.NoOptionError: pass try: rpki.sql.sql_persistent.sql_debug = self.getboolean("sql_debug") except ConfigParser.NoOptionError: pass try: rpki.async.timer.gc_debug = self.getboolean("gc_debug") except ConfigParser.NoOptionError: pass try: rpki.async.timer.run_debug = self.getboolean("timer_debug") except ConfigParser.NoOptionError: pass try: rpki.x509.XML_CMS_object.dump_outbound_cms = rpki.x509.DeadDrop(self.get("dump_outbound_cms")) except ConfigParser.NoOptionError: pass try: rpki.x509.XML_CMS_object.dump_inbound_cms = rpki.x509.DeadDrop(self.get("dump_inbound_cms")) except ConfigParser.NoOptionError: pass try: rpki.async.gc_summary(self.getint("gc_summary"), self.getint("gc_summary_threshold", 0)) except ConfigParser.NoOptionError: pass try: rpki.log.enable_tracebacks = self.getboolean("enable_tracebacks") except ConfigParser.NoOptionError: pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _GclStyleSettings(self):\n settings = {\n 'port': self.GetCodeReviewSetting('TRYSERVER_HTTP_PORT'),\n 'host': self.GetCodeReviewSetting('TRYSERVER_HTTP_HOST'),\n 'svn_repo': self.GetCodeReviewSetting('TRYSERVER_SVN_URL'),\n 'gerrit_url': self.GetCodeReviewSetting('TRYSERVER_GERRIT_URL'),...
[ "0.5948904", "0.55440557", "0.55275595", "0.5320084", "0.5296819", "0.5249181", "0.5204246", "0.520022", "0.51432735", "0.5137308", "0.5134827", "0.5134588", "0.5134572", "0.5123408", "0.50960565", "0.5094958", "0.50912803", "0.50796866", "0.50630355", "0.50593835", "0.505469...
0.66763616
0
Read inputs and find keys with multiple values.
def main() : parser = argparse.ArgumentParser() parser.add_argument("key", help="column name of the KEY", nargs=1) parser.add_argument("val", help="column name of the VALUE", nargs=1) parser.add_argument("infile", help="CSV input file", nargs=1) parser.add_argument("outfile", help="CSV output file", nargs=1) try : args = parser.parse_args() except IOError as e : print (e) sys.exit(1) analyzer = Analyzer(args.key[0], args.val[0]) analyzer.read(*args.infile) analyzer.write(*args.outfile)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def keysWhichMatch(cls, *args):\n if len(cls.keys) < len(args) > 0:\n raise ValueError('Number of keys provided is too long.\\n'\n 'Len Class Keys: %s\\n'\n 'Len Provided Keys: %s\\n' % (len(cls.keys), len(args)))\n\n index = 0\n ...
[ "0.5824653", "0.5802758", "0.578554", "0.5753453", "0.5746162", "0.5673391", "0.5568094", "0.5562528", "0.5508408", "0.5508204", "0.55056906", "0.5499257", "0.5487545", "0.54547495", "0.5448356", "0.5445263", "0.54099", "0.5394247", "0.5388735", "0.53838193", "0.53304183", ...
0.0
-1
Checks for number of vertices >>> num_vertices([[1, 1], [1, 1]], [[1, 1, 1], [1, 1, 1], [1, 1, 1]]) False >>> num_vertices([[1, 0], [0, 1]], [[0, 0], [0, 0]]) True
def num_vertices(graph1: list, graph2: list): if len(graph1[0]) != len(graph2[0]): return False return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def hasvertices(self):\n if len(self.vertices) > 0:\n return True\n else:\n return False", "def num_vertices(self):\n return len(self.vertices)", "def num_vertices(self):\n return len(self.vertices)", "def getNumVertices(self):\n return len(self.V)", ...
[ "0.7340938", "0.7330129", "0.7330129", "0.71462345", "0.7098103", "0.70285016", "0.6986101", "0.6935374", "0.68259686", "0.67648", "0.67457217", "0.66626644", "0.6626465", "0.6615794", "0.6540091", "0.6527618", "0.6514949", "0.64980805", "0.64802784", "0.64247876", "0.6411660...
0.7056209
5
Checks for number of edges >>> num_edges([[1, 1], [0, 1]], [[1, 1], [1, 1]]) False >>> num_edges([[1, 0], [0, 1]], [[0, 1], [1, 0]]) True
def num_edges(graph1: list, graph2: list): check1 = 0 check2 = 0 for row, _ in enumerate(graph1): for column, _ in enumerate(graph1[row]): if graph1[row][column] == 1: check1 += 1 if graph2[row][column] == 1: check2 += 1 return check1 == check2
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def num_edges(self):\r\n return len(self.__generate_edges())", "def _number_of_edges(self):\n if self._edges is None:\n return 0\n return len(self._edges)", "def Test_NumEdges(Graph_MD):\n N_Edges = float(Graph_MD.number_of_edges())\n\n return N_Edges", "def has_edges(s...
[ "0.7388578", "0.73457235", "0.7252701", "0.712351", "0.70799005", "0.7057893", "0.6985507", "0.6887687", "0.68730575", "0.68627745", "0.68099093", "0.6797033", "0.66984785", "0.6688148", "0.66881293", "0.66494614", "0.6648401", "0.6627459", "0.66019803", "0.6598041", "0.65855...
0.68215644
10
Checks for vertices' degrees >>> vertices_degree([[1, 0], [1, 1]], [[0, 1], [1, 0]]) (False, []) >>> vertices_degree([[1, 1], [0, 1]], [[1, 0], [1, 1]]) (True, [2, 1], [1, 2])
def vertices_degree(graph1: list, graph2: list): check1 = [] check2 = [] for row, _ in enumerate(graph1): degree1 = 0 degree2 = 0 for column, _ in enumerate(graph1[row]): if graph1[row][column] == 1: degree1 += 1 if graph2[row][column] == 1: degree2 += 1 check1.append(degree1) check2.append(degree2) if sorted(check1) == sorted(check2): return True, check1, check2 return False, []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _has_degree(\n self,\n degree: int,\n vertex: Vertex,\n ) -> bool:\n\n return vertex.get_id() in self._vertices_of_degree[degree]", "def vertice_degree(self):\r\n if(self.is_empty()):\r\n raise ValueError(\"Graph is empty.\")\r\n else:\r\n if...
[ "0.70981395", "0.6648923", "0.66187066", "0.6453982", "0.6208992", "0.6075277", "0.6049173", "0.6037286", "0.59577584", "0.5948439", "0.59199977", "0.5896408", "0.58028036", "0.580261", "0.57468504", "0.57298976", "0.5702665", "0.5702665", "0.56859505", "0.56500363", "0.56339...
0.6969743
1
Checks if there can be bijection between two graphs
def permutations(graph1: list, graph2: list, degrees: tuple): degrees1 = degrees[0] degrees2 = degrees[1] check1 = [] check2 = [] for index, _ in enumerate(degrees1): degree = degrees1[index] temp = [] for vertex, _ in enumerate(graph1[index]): if graph1[index][vertex] == 1: temp.append(degrees1[vertex]) check1.append((degree, tuple(sorted(temp)))) for index, _ in enumerate(degrees2): degree = degrees2[index] temp = [] for vertex in range(len(graph2[index])): if graph2[index][vertex] == 1: temp.append(degrees2[vertex]) check2.append((degree, tuple(sorted(temp)))) return len(set(check1 + check2)) == len(set(check1))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_bipartite(G):\n try:\n bipartite_color(G)\n return True\n except:\n return False", "def compare_graphs(self):\n\t\tpass", "def graph_issuperset(graph1, graph2):\n\n # Validate if all arguments are Graphs\n check_graphbase_instance(graph1, graph2)\n\n return graph1.nod...
[ "0.58745533", "0.58431983", "0.5821069", "0.58176845", "0.5800676", "0.5800201", "0.571649", "0.56977814", "0.56428045", "0.56401634", "0.5639373", "0.5630606", "0.5611228", "0.5610141", "0.5608601", "0.559945", "0.55920273", "0.5584586", "0.557866", "0.5549889", "0.55390096"...
0.5283513
41
Main function for checking isomorphism >>> check_for_isomorphism([(1, 2), (1, 3), (1, 5),\ (2, 4), (2, 6), (3, 1), (3, 4),\ (4, 2), (5, 1), (5, 6), (5, 7), (6, 8), (7, 8)],\ [(1, 2), (1, 3), (1, 5), (2, 4), (3, 1), (3, 4),\ (3, 7), (4, 2), (5, 6), (5, 7), (6, 8), (7, 8)]) True >>> check_for_isomorphism([(1, 3), (1, 5),\ (2, 4), (2, 6), (3, 1), (3, 4),\ (4, 2), (5, 1), (5, 6), (5, 7), (6, 8), (7, 8)],\ [(1, 2), (1, 3), (1, 5), (2, 4), (3, 1), (3, 4),\ (3, 7), (4, 2), (5, 6), (5, 7), (6, 8), (7, 8)]) False
def check_for_isomorphism(graph1: list, graph2: list, directed=False) -> bool: matrix1 = get_adjancy_matrix(graph1, directed) matrix2 = get_adjancy_matrix(graph2, directed) if num_vertices(matrix1, matrix2): if num_edges(matrix1, matrix2): degrees = vertices_degree(matrix1, matrix2) if degrees[0]: return permutations(matrix1, matrix2, degrees[1:]) return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_check_isomorphism(self):\n mol1 = Molecule(smiles='[O-][N+]#N')\n mol2 = Molecule(smiles='[N-]=[N+]=O')\n self.assertTrue(converter.check_isomorphism(mol1, mol2))", "def test_is_isomorphic(self):\n mol1 = converter.s_bonds_mol_from_xyz(self.xyz1['dict'])\n mol2 = conve...
[ "0.68144727", "0.61583287", "0.57273215", "0.5682086", "0.561438", "0.5609958", "0.5602563", "0.55251133", "0.5451978", "0.5407561", "0.5355188", "0.5268322", "0.5252409", "0.5186853", "0.51790214", "0.50514466", "0.50385714", "0.49780542", "0.49707803", "0.4955957", "0.49171...
0.5521835
8
r""" Wait for the user to type a character (and hit Enter). If the user enters one of the characters in letters, return that character. If the user hits Enter without entering a character, and default is specified, returns `default`, Otherwise, asks the user to enter a character again.
def _prompt(letters='yn', default=None): import sys while True: try: inputstr = sys.stdin.readline().strip() except KeyboardInterrupt: sys.exit(0) if inputstr and inputstr in letters: return inputstr if default is not None and inputstr == '': return default print 'Come again?'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _prompt(letters='yn', default=None):\n while True:\n try:\n input_text = sys.stdin.readline().strip()\n except KeyboardInterrupt:\n sys.exit(0)\n if input_text and input_text in letters:\n return input_text\n if default is not None and input_text ...
[ "0.7521456", "0.7162696", "0.7019684", "0.6967578", "0.6841188", "0.67817444", "0.6743779", "0.66840625", "0.66546315", "0.65572464", "0.6538709", "0.64224786", "0.636659", "0.63616836", "0.63427144", "0.63147855", "0.6242766", "0.61260843", "0.60846496", "0.6079177", "0.6052...
0.750709
1
Function to remove test results and confirmations older than 10 blocks
async def cleanTestResults(CURRENT_HEIGHT): LAST_GOOD_HEIGHT = int(CURRENT_HEIGHT) - 10 for testId in list(testResults): if int(testId) <= LAST_GOOD_HEIGHT: del testResults[testId] for testId in list(testConfirmations): if int(testId) <= LAST_GOOD_HEIGHT: del testConfirmations[testId]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remaining_batch_tests(loaded_batch_tests):\n remaining_tests = batch_test_set - set(loaded_batch_tests)\n with open('remaining_tests.txt', mode='w') as outfile:\n for batch_test in remaining_tests:\n outfile.write(\"%s\\n\" % batch_test)", "def clean_leftovers(tests):\n for test in...
[ "0.6036735", "0.58126444", "0.5707902", "0.5680992", "0.5655867", "0.56548756", "0.5543772", "0.5505886", "0.5502805", "0.5481863", "0.54809284", "0.5464488", "0.5420158", "0.5416538", "0.5402889", "0.5371889", "0.5359482", "0.5342731", "0.53407174", "0.5334368", "0.5320321",...
0.73820996
0
POST handler that accepts uploads of files. It does not store the content of the file, just creates entry in the dictionary to keep track of upload activity.
async def UploadHandlerMem(request): # You cannot rely on Content-Length if transfer is chunked. try: fileSize = 0 timestampStart = datetime.utcnow() usedHandler = 'UploadHandlerMem' try: testId = request.match_info['testId'] except Exception as e: testId = "noId" try: sourceHostPort = request.match_info['sourceHostPort'] except Exception as e: sourceHostPort = "noPort" try: fileName = request.match_info['fileName'] except Exception as e: fileName = "noFileName" while True: chunk, is_end_of_http_chunk = await request.content.readchunk() if not chunk: break fileSize += len(chunk) peername = request.transport.get_extra_info('peername') if peername is not None: host, port = peername else: host = 'nohost' timestampEnd = datetime.utcnow() taskDuration = str(timestampEnd - timestampStart) if testId != "noId" and sourceHostPort != "noPort" and fileName != "noFileName": testConfirmations.setdefault(testId, {}).setdefault(NODE_ADDRESS[0]+':'+str(HOST_PORT), {}).setdefault(host+':'+sourceHostPort, {})['UploadHandlerMem'] = { 'status': 'success', 'message': 'File uploaded', 'testId': testId, 'usedHandler': 'UploadHandlerMem', 'taskDuration': taskDuration, 'fileName': fileName, 'sourceFileName': '10k.txt', 'fileSize': fileSize, 'timestampStart': str(timestampStart), 'timestampEnd': str(timestampEnd) } response_obj = { 'status': 'success', 'message': 'File uploaded to memory', 'testId': testId, 'usedHandler': usedHandler, 'taskDuration': taskDuration, 'fileName': fileName, 'fileSize': fileSize, 'timestampStart': str(timestampStart), 'timestampEnd': str(timestampEnd) } return web.json_response(response_obj) except Exception as e: response_obj = { 'status' : 'failed', 'reason': str(e) } print(str(e)) return web.json_response(response_obj)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post(self):\n filename = str(time.time())\n filepath = os.path.join(\n os.path.join(current_app.config['UPLOAD_FOLDER'], filename))\n with open(filepath, 'bw') as uploadfile:\n chunk_size = 1024\n while True:\n chunk = request.stream.read(chu...
[ "0.7198825", "0.7094963", "0.69860333", "0.69567823", "0.6920896", "0.6851648", "0.68024033", "0.6728201", "0.6680211", "0.65697706", "0.6507112", "0.6479238", "0.6470818", "0.64561504", "0.64233595", "0.64167655", "0.6380935", "0.63550353", "0.62920946", "0.62501407", "0.623...
0.0
-1
POST handler that uploads file to remote host.
async def UploadToRemoteNodeHandler(request): fileSize = 0 timestampStart = datetime.utcnow() usedHandler = 'uploadFileToRemoteNode' try: data = await request.post() try: testId = data['testId'] except Exception as e: testId = str(uuid.uuid4()) try: fileName = data['fileName'] except Exception as e: fileName = '1k.txt' try: destinationFileName = data['destinationFileName'] except Exception as e: destinationFileName = fileName try: destinationHost = data['destinationHost'] except Exception as e: print('no destinationHost') fileUrl = 'http://'+destinationHost+'/uploadFile/'+testId #print(fileUrl) async with ClientSession() as session: try: async with ClientSession() as session: async with session.post(fileUrl, data ={ 'testId': testId, 'fileName': destinationFileName, 'file': download_files[fileName] }) as response: data = await response.json() fileSize = len(download_files[fileName]) timestampEnd = datetime.utcnow() taskDuration = str(timestampEnd - timestampStart) testResults.setdefault(testId, {}).setdefault(usedHandler, {})["fileName"] = fileName testResults.setdefault(testId, {}).setdefault(usedHandler, {})["destinationFileName"] = destinationFileName testResults.setdefault(testId, {}).setdefault(usedHandler, {})["fileSize"] = fileSize testResults.setdefault(testId, {}).setdefault(usedHandler, {})["timestampStart"] = str(timestampStart) testResults.setdefault(testId, {}).setdefault(usedHandler, {})["timestampEnd"] = str(timestampEnd) testResults.setdefault(testId, {}).setdefault(usedHandler, {})["taskDuration"] = taskDuration response_obj = { 'status': 'success', 'message': 'File uploaded', 'testId': testId, 'usedHandler': usedHandler, 'taskDuration': taskDuration, 'fileName': fileName, 'destinationFileName': destinationFileName, 'fileSize': fileSize, 'timestampStart': str(timestampStart), 'timestampEnd': str(timestampEnd) } return web.json_response(response_obj) except Exception as e: response_obj = { 'status' : 'failed', 'message': str(e) } return 
web.json_response(response_obj) except Exception as e: response_obj = { 'status' : 'failed', 'message': str(e) } return web.json_response(response_obj)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def post(self):\n filename = str(time.time())\n filepath = os.path.join(\n os.path.join(current_app.config['UPLOAD_FOLDER'], filename))\n with open(filepath, 'bw') as uploadfile:\n chunk_size = 1024\n while True:\n chunk = request.stream.read(chu...
[ "0.68752784", "0.672209", "0.6701313", "0.66228", "0.6602018", "0.6584897", "0.6579177", "0.65558815", "0.6509413", "0.65081227", "0.64988804", "0.6480952", "0.6429412", "0.64271265", "0.6385441", "0.6369564", "0.63390976", "0.63218266", "0.6314229", "0.63084084", "0.6304595"...
0.577518
96
POST handler that downloads file from remote host.
async def DownloadFromRemoteNodeHandler(request): fileSize = 0 timestampStart = datetime.utcnow() usedHandler = 'downloadFileFromRemoteNode' try: data = await request.post() try: testId = data['testId'] except Exception as e: print('no testId, generating own') testId = str(uuid.uuid4()) try: fileName = data['destinationFileName'] except Exception as e: print('no fileName, generating own') fileName = "".join(choice(allchar) for x in range(randint(5, 5))) try: sourceHost = data['sourceHost'] sourceFileName = data['sourceFileName'] except Exception as e: print('no sourceHost and sourceFileName') fileUrl = 'http://'+sourceHost+'/downloadFile/'+sourceFileName+'/'+testId async with ClientSession() as session: try: async with session.get(fileUrl) as response: async for data in response.content.iter_chunked(1024): fileSize += len(data) timestampEnd = datetime.utcnow() taskDuration = str(timestampEnd - timestampStart) testResults.setdefault(testId, {}).setdefault(usedHandler, {})["fileName"] = fileName testResults.setdefault(testId, {}).setdefault(usedHandler, {})["sourceFileName"] = sourceFileName testResults.setdefault(testId, {}).setdefault(usedHandler, {})["fileSize"] = fileSize testResults.setdefault(testId, {}).setdefault(usedHandler, {})["timestampStart"] = str(timestampStart) testResults.setdefault(testId, {}).setdefault(usedHandler, {})["timestampEnd"] = str(timestampEnd) testResults.setdefault(testId, {}).setdefault(usedHandler, {})["taskDuration"] = taskDuration response_obj = { 'status': 'success', 'message': 'File downloaded', 'testId': testId, 'usedHandler': usedHandler, 'taskDuration': taskDuration, 'fileName': fileName, 'sourceFileName': sourceFileName, 'fileSize': fileSize, 'timestampStart': str(timestampStart), 'timestampEnd': str(timestampEnd) } return web.json_response(response_obj) except Exception as e: response_obj = { 'status' : 'failed', 'message': str(e) } return web.json_response(response_obj) except Exception as e: response_obj = { 'status' : 
'failed', 'message': str(e) } return web.json_response(response_obj)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def download_file(self, remote_file):\n remote_file.download()", "def download_file():\n\n if 'POST' == request.method:\n file_id = request.form['file_id']\n else:\n file_id = request.args.get('file_id')\n\n # 1 ==> example_1.tgz\n file_path = file_manager.get_file_path_from_id(f...
[ "0.70488554", "0.7008286", "0.6648603", "0.65757704", "0.6246295", "0.61764085", "0.6117598", "0.611518", "0.608899", "0.608899", "0.608899", "0.608899", "0.608899", "0.608899", "0.608899", "0.608899", "0.608899", "0.608899", "0.608899", "0.608899", "0.608899", "0.608899", ...
0.0
-1
calculates the information reduction between layers This function computes the multiinformation (total correlation) reduction after a linear transformation.
def information_reduction( X: np.ndarray, Y: np.ndarray, uni_entropy: Callable, tol_dims: int, p: float = 0.25, ) -> float: # calculate the marginal entropy hx = jax.vmap(uni_entropy)(X.T) hy = jax.vmap(uni_entropy)(Y.T) # Information content delta_info = np.sum(hy) - np.sum(hx) tol_info = np.sqrt(np.sum((hy - hx) ** 2)) # get tolerance n_dimensions = X.shape[1] # conditional cond = np.logical_or( tol_info < np.sqrt(n_dimensions * p * tol_dims ** 2), delta_info < 0 ) return np.array(np.where(cond, 0.0, delta_info))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def MotcorCatenate(self, info, base, anat_tgt):\n# First compute the transformation matrices due to epi-to-epi motion.\n fmt = '3dvolreg -prefix NULL -1Dmatrix_save %s -twopass ' + \\\n '-verbose -base %s+orig[%s] -dfile %s %s+orig'\n cmd = fmt % (info['matfile_m'], i...
[ "0.5332517", "0.532662", "0.53114045", "0.5299733", "0.52236503", "0.5153382", "0.51122165", "0.5084768", "0.50838053", "0.50646925", "0.505108", "0.5036968", "0.5031033", "0.50278485", "0.50072664", "0.50027597", "0.4986492", "0.49739137", "0.49683747", "0.4963426", "0.49604...
0.45976505
86
Reads choice and directs on a path depending on input
def main_page(self): choice = "" while choice != "x": header, main_menu, choices, underline = self.__get_format.main_menu_format() choice = self.__main_menu.main_page(header,main_menu,choices,underline) if choice == "1": self.__rent_controller.Rent_page() elif choice == "2": try_again = "" while try_again != "n": try_again, valid = self.__salesman_controller.sign_in_page() if valid == True: self.__salesman_controller.salesman_menu() elif choice == "3": self.__order_controller.find_order_process(page=2) elif choice == "i": self.__information_controller.information_page()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def warriorPath1():\n with open('stories/warrior/warrior_path1.txt') as f:\n path1 = f.read()\n print(path1.format(NAME))\n while True:\n print(\"What will you do?\")\n print(\"1. Don't light the torch and keep walking\")\n print(\"2. Light the torch with magic\")\n...
[ "0.6550537", "0.62204075", "0.61774844", "0.61713785", "0.6165053", "0.61213124", "0.6097766", "0.6087929", "0.60319537", "0.60028327", "0.59654415", "0.5920424", "0.59185994", "0.59036005", "0.5848893", "0.5798395", "0.5788869", "0.5716511", "0.5700164", "0.56988907", "0.568...
0.0
-1
Run all dispatch tests
def dispatch(): suite = ServiceTestSuite() suite.addTest(unittest.makeSuite(AmazonTestCase, 'test_dispatch')) return suite
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def runTests(self):\n \n pass", "def doAllTests(self):\n # Initial offset\n self.getAlertsFile()\n self.offset = self.getOffset(self.config.get('PATHS', 'tempfile'))\n\n # Do all tests\n # As the socket is not persistent, client side attacks have to be done before all...
[ "0.7445618", "0.72676694", "0.72357136", "0.7137783", "0.69886786", "0.69496655", "0.69481313", "0.6913679", "0.6835688", "0.6833541", "0.68203133", "0.68139446", "0.6779515", "0.67011243", "0.6626831", "0.66243935", "0.6614983", "0.65842265", "0.6561729", "0.6560582", "0.652...
0.6487395
21
Run all local tests
def local(): suite = ServiceTestSuite() suite.addTest(unittest.makeSuite(AmazonTestCase, 'test_local')) return suite
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _run_local_tests(self, *args, **kwargs):\n pass", "def runAll():\n\n loader = unittest.TestLoader()\n test_dir = pkg_resources.resource_filename('frvcpy.test','.')\n suite = loader.discover(test_dir)\n\n runner = unittest.TextTestRunner(verbosity=2)\n runner.run(suite)", "def main():\...
[ "0.8297117", "0.7959936", "0.7950929", "0.7797158", "0.768714", "0.766804", "0.76199144", "0.75607073", "0.7456279", "0.744708", "0.7380929", "0.7380103", "0.72950643", "0.7281221", "0.727362", "0.7181298", "0.71580607", "0.7139827", "0.7125284", "0.70671266", "0.7064775", ...
0.0
-1
Run all network tests
def net(): suite = ServiceTestSuite() suite.addTest(unittest.makeSuite(AmazonTestCase, 'test_net')) return suite
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_all():\n test_get_to()\n test_error_type()\n test_exchange()\n print(\"All tests passed.\")", "def run_all_tests():\n remove_dbs()\n run_training_tests()\n run_custom_training_tests()\n run_training_save_tests()\n run_validation_tests()\n run_feature_extraction_tests()", ...
[ "0.7459378", "0.74455136", "0.7278033", "0.71453613", "0.713573", "0.7003208", "0.69377714", "0.6935737", "0.6916369", "0.68943274", "0.6879508", "0.6857769", "0.67931044", "0.6778059", "0.6765904", "0.675124", "0.67291695", "0.66894144", "0.66677123", "0.6644143", "0.6638301...
0.0
-1
enforceZeroSlope ensures that the last slope of all elbow fits is always 0
def find_elbows_per_boots(dfr, nElbows, enforceZeroSlope=False): rows = [] for dotmode, dfp in dfr.groupby('dotmode'): if dotmode == '3d': # always take out 3d's first two dis # since pcor's correlation with duration is too weak to fit pmf dfp = remove_dis(dfp, [1, 2], dotmode) for bi, dfpts in dfp.groupby('bi'): row = find_elbows_one_boot(dfpts, nElbows, enforceZeroSlope) row.update({'dotmode': dotmode, 'bi': bi}) rows.append(row) return pd.DataFrame(rows)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def fit_slope_with_zero_intercept_residue(X,Y):\n X = np.array(X)\n Y = np.array(Y)\n slope = np.sum(Y*X)/np.sum(np.power(X,2))\n return slope*X - Y", "def zeroCrossing(self,evap_threshold):\r\n\t\tself.splitBaseline =(np.mean(self.splitData[0:10]))\t\r\n\t\tsplit_max_index = np.argmax(self.splitDat...
[ "0.61061126", "0.5953062", "0.5737827", "0.5657327", "0.56395733", "0.56212384", "0.5598636", "0.558573", "0.55028105", "0.5377003", "0.53153276", "0.5299184", "0.52155274", "0.52150947", "0.5214609", "0.5210322", "0.52011937", "0.5195882", "0.51850694", "0.5184016", "0.51615...
0.0
-1
Instance data use_wsdl if True try to construct XML Instance from information in WSDL.
def __init__(self, wsdl, service=None, port=None, tracefile=None,
             typesmodule=None, nsdict=None, soapAction=None, ns=None,
             op_ns=None, use_wsdl=False):
    """Proxy for one service/port of a WSDL description.

    wsdl -- a parsed WSDL object, or anything else (treated as a URL to
    load the WSDL from).
    service, port -- name or None (None selects index 0).
    use_wsdl -- if True, construct XML instances from WSDL information.

    One MethodProxy attribute is attached per port-type operation, so
    operations can be invoked as ordinary method calls on the proxy.
    """
    # Anything without a targetNamespace is assumed to be a URL to fetch.
    if not hasattr(wsdl, 'targetNamespace'):
        wsdl = wstools.WSDLTools.WSDLReader().loadFromURL(wsdl)

    self._wsdl = wsdl
    self._service = wsdl.services[service or 0]
    self.__doc__ = self._service.documentation
    self._port = self._service.ports[port or 0]
    self._name = self._service.name
    self._tracefile = tracefile
    self._typesmodule = typesmodule
    self._nsdict = nsdict or {}
    self._soapAction = soapAction
    self._ns = ns
    self._op_ns = op_ns
    self._use_wsdl = use_wsdl

    portType = self._port.getBinding().getPortType()
    for operation in portType.operations:
        callinfo = wstools.WSDLTools.callInfoFromWSDL(self._port,
                                                      operation.name)
        setattr(self, operation.name, MethodProxy(self, callinfo))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _prepare_wsdl_objects(self):\r\n # This holds some optional options for the request..\r\n self.AddressValidationOptions = self.client.factory.create('AddressValidationOptions')\r\n \r\n # This is good to review if you'd like to see what the data structure\r\n ...
[ "0.5577236", "0.5367035", "0.5321477", "0.5150564", "0.50830454", "0.5067312", "0.4986971", "0.49749762", "0.4935389", "0.48817945", "0.4843432", "0.48354465", "0.48144224", "0.47744632", "0.47571477", "0.47335753", "0.47333562", "0.47290888", "0.47188824", "0.46990353", "0.4...
0.54205626
1
Call the named remote web service method.
def _call(self, name, *args, **kwargs):
    """Call the named remote web service method.

    name -- operation name; its callinfo (attached by __init__ via
    MethodProxy) supplies the SOAP action and endpoint location.

    Positional and keyword arguments are mutually exclusive; keyword
    arguments are only honored on the WSDL-typed path.
    """
    if len(args) and len(kwargs):
        raise TypeError(
            'Use positional or keyword argument only.'
            )

    callinfo = getattr(self, name).callinfo
    soapAction = callinfo.soapAction
    url = callinfo.location
    # NOTE(review): urlparse returns (scheme, netloc, path, params,
    # query, fragment); the local names here are historical misnomers
    # but only protocol/host/uri are actually used.
    (protocol, host, uri, query, fragment, identifier) = urlparse(url)
    port = '80'
    if host.find(':') >= 0:
        host, port = host.split(':')

    binding = Binding(host=host, tracefile=self._tracefile,
                      ssl=(protocol == 'https'),
                      port=port, url=None, typesmodule=self._typesmodule,
                      nsdict=self._nsdict, soapaction=self._soapAction,
                      ns=self._ns, op_ns=self._op_ns)

    if self._use_wsdl:
        request, response = self._getTypeCodes(callinfo)
        if len(kwargs):
            args = kwargs
        if request is None:
            request = Any(oname=name)
        binding.Send(url=uri, opname=None, obj=args,
                     nsdict=self._nsdict, soapaction=soapAction,
                     requesttypecode=request)
        return binding.Receive(replytype=response)

    # Fix: apply() was deprecated since Python 2.3 and removed in
    # Python 3; the extended call syntax f(*args) is the equivalent,
    # portable form.
    getattr(binding, callinfo.methodName)(*args)
    return binding.Receive()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _call(self, rpc_method_name, *args, **kwargs):\n method = getattr(self, rpc_method_name)\n return method(*args, **kwargs)", "def _method_call(self, msg):\n #print(\"Performing service: %s, method_name: %s\" % (msg.service_name, msg.method_name))\n service = self._services.get(msg....
[ "0.72344095", "0.7016205", "0.6945265", "0.6935145", "0.6773144", "0.6740759", "0.6691549", "0.667169", "0.6600439", "0.6472304", "0.63347936", "0.62916875", "0.6260245", "0.6260234", "0.62297845", "0.6161755", "0.61566997", "0.6142578", "0.60809994", "0.6068365", "0.6018835"...
0.6306776
11
Returns typecodes representing the input and output messages; if the request and/or response typecode fails to be generated, None is returned for either or both. callinfo -- WSDLTools.SOAPCallInfo instance describing an operation.
def _getTypeCodes(self, callinfo):
    """Return (request, response) typecodes for an operation.

    callinfo -- WSDLTools.SOAPCallInfo instance describing an operation.

    If the request and/or response typecode fails to be generated,
    None is returned in its place (a DEBUG line is printed).
    """
    prefix = None
    self._resetPrefixDict()
    if callinfo.use == 'encoded':
        prefix = self._getPrefix(callinfo.namespace)

    try:
        requestTC = self._getTypeCode(parameters=callinfo.getInParameters(),
                                      literal=(callinfo.use == 'literal'))
    except EvaluateException as ex:
        # Fix: 'except E, ex:' and the print statement are Python-2-only
        # syntax; 'as' (2.6+) and print() are accepted by both 2 and 3.
        print("DEBUG: Request Failed to generate -- %s" % ex)
        requestTC = None

    self._resetPrefixDict()
    try:
        replyTC = self._getTypeCode(parameters=callinfo.getOutParameters(),
                                    literal=(callinfo.use == 'literal'))
    except EvaluateException as ex:
        print("DEBUG: Response Failed to generate -- %s" % ex)
        replyTC = None

    request = response = None
    if callinfo.style == 'rpc':
        # rpc style wraps the part typecodes in a struct named after the
        # method (response struct gets the conventional 'Response' suffix)
        if requestTC:
            request = TC.Struct(pyclass=None, ofwhat=requestTC,
                                pname=callinfo.methodName)
        if replyTC:
            response = TC.Struct(pyclass=None, ofwhat=replyTC,
                                 pname='%sResponse' % callinfo.methodName)
    else:
        # document style uses the (single) global-element typecode directly
        if requestTC:
            request = requestTC[0]
        if replyTC:
            response = replyTC[0]

    # THIS IS FOR RPC/ENCODED, DOC/ENCODED Wrapper
    if request and prefix and callinfo.use == 'encoded':
        request.oname = '%(prefix)s:%(name)s xmlns:%(prefix)s="%(namespaceURI)s"' \
            % {'prefix': prefix, 'name': request.oname,
               'namespaceURI': callinfo.namespace}

    return request, response
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetModelOutputInfo(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def verify_call(obj):\n\tassert obj.tag == 'OMOBJ'\n\tattr = obj[0]\n\t\n\tassert attr.tag == 'OMAT...
[ "0.5174658", "0.5018817", "0.4834404", "0.47635424", "0.4631196", "0.46073565", "0.45646647", "0.45536888", "0.4535534", "0.45211482", "0.44949257", "0.4465704", "0.44512537", "0.4426451", "0.44040138", "0.44033703", "0.43914264", "0.43877032", "0.4385859", "0.43828747", "0.4...
0.73895764
0
Returns typecodes representing a parameter set parameters list of WSDLTools.ParameterInfo instances representing the parts of a WSDL Message.
def _getTypeCode(self, parameters, literal=False):
    """Return a list of typecodes, one per message part.

    parameters -- list of WSDLTools.ParameterInfo instances representing
    the parts of a WSDL Message.
    literal -- True for literal encoding, forwarded to the element/type
    resolution helpers.
    """
    typecodes = []
    for part in parameters:
        nsuri, localname = part.type
        if part.element_type:
            # part references a global element declaration
            elt = self._wsdl.types[nsuri].elements[localname]
            typecodes.append(self._getElement(elt, literal=literal,
                                              local=False,
                                              namespaceURI=nsuri))
            continue
        # part references a (possibly built-in) type; prefer a known
        # typecode class, fall back to resolving the schema type
        klass = self._getTypeClass(nsuri, localname)
        if klass:
            typecodes.append(klass(part.name))
        else:
            tp = self._wsdl.types[nsuri].types[localname]
            typecodes.append(self._getType(tp, part.name, literal,
                                           local=True, namespaceURI=nsuri))
    return typecodes
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parameterTypes(self, p_int): # real signature unknown; restored from __doc__\n return []", "def getParamsType(self):\n\t\treturn [\"int\", \"int\"]", "def parameterNames(self, p_int): # real signature unknown; restored from __doc__\n return []", "def _fi_in_parameters(self) -> List[Tuple[st...
[ "0.6973047", "0.63543373", "0.6333325", "0.628187", "0.62337446", "0.6057316", "0.6028395", "0.6020107", "0.60162276", "0.5864634", "0.5859402", "0.58460873", "0.5785523", "0.5735038", "0.5657189", "0.5608447", "0.56071746", "0.5583623", "0.55738485", "0.55229664", "0.5505756...
0.61186475
5
Namespace-qualifies typecodes representing global elements for literal encoding. typeCode -- typecode representing an element. namespaceURI -- namespace. literal -- True/False.
def _globalElement(self, typeCode, namespaceURI, literal):
    """Qualify a global element's typecode for literal encoding.

    typeCode -- typecode representing an element.
    namespaceURI -- the element's namespace.
    literal -- when True, rewrite typeCode.oname to a prefixed form
    carrying its own xmlns declaration; otherwise leave it untouched.
    """
    if not literal:
        return
    prefix = self._getPrefix(namespaceURI)
    typeCode.oname = '%s:%s xmlns:%s="%s"' % (
        prefix, typeCode.oname, prefix, namespaceURI)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def XmlTypeNamespace(self) -> str:", "def is_namespace_type(self):\n raise exceptions.NotImplementedError()", "def GetNamespaces(self):\n return list(self.type_namespaces_map.values())", "def element_type(self) -> global___Type:", "def patch_well_known_namespaces(etree_module):\n etree_module....
[ "0.66442066", "0.5593534", "0.5443724", "0.5412149", "0.5365882", "0.5329653", "0.5311929", "0.5237586", "0.5178215", "0.5165827", "0.5055696", "0.5044016", "0.5007665", "0.4926739", "0.48959085", "0.48641986", "0.48631665", "0.4855509", "0.48434836", "0.48183277", "0.4792356...
0.6797826
0
Retrieves (or allocates) the prefix for a namespace mapping. namespaceURI -- namespace.
def _getPrefix(self, namespaceURI):
    """Retrieve the prefix mapped to a namespace URI, allocating a new
    'nsN' prefix (and recording it) if none exists yet.

    namespaceURI -- namespace to look up.
    """
    prefixDict = self._getPrefixDict()
    # Fix: dict.has_key() is Python-2-only; 'in' works on both 2 and 3.
    if namespaceURI in prefixDict:
        return prefixDict[namespaceURI]
    # Fix: the original increment, 'ns%d' %int(prefix[-1]) + 1, binds as
    # ('ns%d' % ...) + 1 and raises TypeError on any collision; it also
    # only read the last digit.  Use an explicit counter instead.
    n = 1
    prefix = 'ns1'
    while prefix in prefixDict.values():
        n += 1
        prefix = 'ns%d' % n
    prefixDict[namespaceURI] = prefix
    return prefix
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def prefix_to_ns(self, prefix):\n defin = self.module.i_ctx.get_module(\n self.module.i_prefixes[prefix][0])\n return defin.search_one(\"namespace\").arg", "def get_namespace(self, prefix):\n try:\n return self.parser.namespaces[prefix]\n except KeyError as err:\...
[ "0.74894905", "0.723001", "0.7178781", "0.7092518", "0.7036941", "0.6799343", "0.67157125", "0.67054284", "0.66477394", "0.6579663", "0.64730036", "0.646861", "0.6420464", "0.64164484", "0.64045894", "0.63432497", "0.63370234", "0.63120365", "0.63070714", "0.62537974", "0.623...
0.78372264
0