query
stringlengths
9
3.4k
document
stringlengths
9
87.4k
metadata
dict
negatives
listlengths
4
101
negative_scores
listlengths
4
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
Turn all slots of currClusters to zero.
def make_all_zero(curr_clusters, k, num_of_cords): for i in range(k): for j in range(num_of_cords): curr_clusters[i][j] = 0
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _empty_clusters(clusters):\n for clst in clusters:\n clst.points = []", "def reset(self):\n self._clusters = {}\n self._clusters_val = {}\n self._centroids = {}\n self.store()", "def zero_cluster():\n if _TRAFFICCTL:\n cmd = _traffic_ctl(\"metric\", \"clear\"...
[ "0.6796202", "0.64024293", "0.63158333", "0.62675047", "0.6168588", "0.6164785", "0.6119265", "0.60707", "0.6068551", "0.6046783", "0.595144", "0.5930416", "0.59281564", "0.5927291", "0.59178776", "0.5910214", "0.5910083", "0.5907188", "0.5899487", "0.58834374", "0.585963", ...
0.74820185
0
Check if clusters1 equals to clusters2, return True if so, or False if not.
def is_converged(clusters1, clusters2, k, num_of_cords): for i in range(k): for j in range(num_of_cords): if clusters1[i][j] != clusters2[i][j]: return False return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def identical_cds(sc1,sc2):\n # Input 2 identical segment chains, return True if cds the same\n if sc1.covers(sc2) and sc2.covers(sc1):\n return True\n else:\n return False", "def are_clusters_similar(cls, c1, c2, proportion=0.8):\n if len(c1.indices) > len(c2.indices):\n ...
[ "0.6819774", "0.67053854", "0.6577482", "0.6567598", "0.6566153", "0.6465101", "0.6442963", "0.6380351", "0.6379164", "0.6357021", "0.63215804", "0.6296718", "0.62854797", "0.6203128", "0.62005913", "0.61896455", "0.6144872", "0.61378944", "0.6137696", "0.61299753", "0.610979...
0.8044068
0
Print error message and exit.
def call_error(): print("Error in input format.") sys.exit()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def error(message):\n print message\n sys.exit(2)", "def error(msg):\n print 'ERROR: %s' % msg\n sys.exit(1)", "def ErrorExit(msg):\r\n print >>sys.stderr, msg\r\n sys.exit(1)", "def print_error_and_exit(error_message):\n\n print(\"Error: \" + error_message)\n sys.exit()", "def error(ms...
[ "0.8682652", "0.83133286", "0.8252245", "0.81974244", "0.81944567", "0.81696945", "0.8102151", "0.80890137", "0.80763304", "0.8058573", "0.8032233", "0.8027259", "0.8015662", "0.799886", "0.79149866", "0.78957844", "0.78918314", "0.7880633", "0.78051", "0.78021055", "0.774882...
0.7875015
18
Print the coordinates as needed for the exercise.
def print_final_res(res, k, num_of_cords): for t in range(k): for p in range(num_of_cords): counter = 1 num = str(float("{:.4f}".format(res[t][p]))).split(".") integ = list(map(str, num[0])) frac = list(map(int, num[1])) for digit in integ: if digit == "-": print(digit, end="") elif counter <= 5: print(int(digit), end="") counter += 1 if counter <= 5: print(".", end="") for digit in frac: if counter <= 5: print(digit, end="") counter += 1 while counter <= 5: print(0, end="") counter += 1 if p == num_of_cords-1: print("") else: print(",", end="")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def print_my_coords(x = 0, y = 0):\n\tprint(\"Coords (%d;%d)\" % (x,y))", "def display(self):\n \"\"\" Coordinates for position are x-axis (LR) and y-axis (NS) \"\"\"\n for coordY in range(self.y):\n print()\n for column in range(self.height):\n for coordLR in range(sel...
[ "0.7668902", "0.72341406", "0.6983642", "0.68224555", "0.6820623", "0.6797979", "0.6736476", "0.6682067", "0.66407675", "0.6639759", "0.6600853", "0.65653497", "0.6540607", "0.6519379", "0.6514798", "0.64698935", "0.6465841", "0.64637744", "0.64516777", "0.64497644", "0.64263...
0.0
-1
Given batch of anchor descriptors and positive descriptors calculate euclidean distance matrix
def distance_matrix_vector(anchor, positive): # here anchor*anchor is equal torch.mul(anchor, anchor) # the element-wise value multiplication is returned d1_sq = torch.sum(anchor * anchor, dim=1).unsqueeze(-1) d2_sq = torch.sum(positive * positive, dim=1).unsqueeze(-1) eps = 1e-6 # tensor.repeat(): repeat at each dims, and dims from right to left return torch.sqrt((d1_sq.repeat(1, anchor.size(0)) + torch.t(d2_sq.repeat(1, positive.size(0))) - 2.0 * torch.bmm(anchor.unsqueeze(0), torch.t(positive).unsqueeze(0)).squeeze(0)) +eps)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _derive_euclidean_dm(self, cat_mat, dim):\r\n res_mat = []\r\n\r\n for i in range(dim):\r\n res_mat.append([0 for k in range(dim)])\r\n for j in range(i):\r\n res_mat[i][j] = self._vector_dist(cat_mat[i], cat_mat[j])\r\n res_mat[j][i] = res_mat[...
[ "0.661237", "0.65518934", "0.65397006", "0.636195", "0.6311224", "0.6290709", "0.6187235", "0.61502486", "0.61339396", "0.61114454", "0.61069995", "0.6097225", "0.6097225", "0.6093995", "0.6067778", "0.6055344", "0.6040386", "0.6026841", "0.60233694", "0.6018614", "0.6015458"...
0.60668904
15
Given batch of anchor descriptors and positive descriptors calculate distance matrix The distance metric is Euclidean distance L2dist
def distance_vectors_pairwise(anchor, positive, negative): a_sq = torch.sum(anchor * anchor, dim=1) p_sq = torch.sum(positive * positive, dim=1) n_sq = torch.sum(negative * negative, dim=1) eps = 1e-8 d_a_p = torch.sqrt(a_sq + p_sq - 2*torch.sum(anchor * positive, dim = 1) + eps) d_a_n = torch.sqrt(a_sq + n_sq - 2*torch.sum(anchor * negative, dim = 1) + eps) d_p_n = torch.sqrt(p_sq + n_sq - 2*torch.sum(positive * negative, dim = 1) + eps) return d_a_p, d_a_n, d_p_n
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compute_l2_distance_matrix(features_queries, features_dataset):\n sx = np.sum(features_queries ** 2, axis=1, keepdims=True)\n sy = np.sum(features_dataset ** 2, axis=1, keepdims=True)\n\n return np.sqrt(-2 * features_queries.dot(features_dataset.T) + sx + sy.T)", "def distance(self, features, target...
[ "0.6684549", "0.6541754", "0.6541754", "0.6529875", "0.64755595", "0.64498234", "0.6433003", "0.64293146", "0.6418028", "0.6330827", "0.6323023", "0.6297623", "0.62218076", "0.6159287", "0.6140069", "0.6132013", "0.6113837", "0.6111654", "0.61090785", "0.6100563", "0.60982287...
0.5747658
69
Loss with random sampling (no hard in batch).
def loss_random_sampling(anchor, positive, negative, anchor_swap = False, margin = 1.0, loss_type = "triplet_margin"): assert anchor.size() == positive.size(), "Input sizes between positive and negative must be equal." assert anchor.size() == negative.size(), "Input sizes between positive and negative must be equal." assert anchor.dim() == 2, "Inputd must be a 2D matrix." eps = 1e-8 (pos, d_a_n, d_p_n) = distance_vectors_pairwise(anchor, positive, negative) # distance based anchor, if anchor swap, get the min(anchor, positive) if anchor_swap: min_neg = torch.min(d_a_n, d_p_n) else: min_neg = d_a_n if loss_type == "triplet_margin": # the func is (m + D_p - D_n) loss = torch.clamp(margin + pos - min_neg, min=0.0) elif loss_type == 'softmax': # here the output is 2-class log-softmax loss(1/0) from L2Net exp_pos = torch.exp(2.0 - pos) exp_den = exp_pos + torch.exp(2.0 - min_neg) + eps loss = - torch.log( exp_pos / exp_den ) elif loss_type == 'contrastive': loss = torch.clamp(margin - min_neg, min=0.0) + pos else: print ('Unknown loss type. Try triplet_margin, softmax or contrastive') sys.exit(1) loss = torch.mean(loss) return loss
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def gen_loss_orig(self, noise_samples):\n generator_samples = self.gen_model(noise_samples)\n logits_gen = self.disc_model(generator_samples)\n # loss = -tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=tf.zeros(logits_gen.shape), logits=logits_gen))\n loss = tf.reduce_mean...
[ "0.6654641", "0.6595753", "0.65544915", "0.64899355", "0.6487611", "0.6350678", "0.63220733", "0.62942404", "0.6251492", "0.62248963", "0.61995137", "0.61536497", "0.61419076", "0.6131341", "0.6121647", "0.6115955", "0.6109206", "0.60922635", "0.6075736", "0.6044063", "0.6037...
0.5773103
79
HardNet margin loss calculates loss based on distance matrix based on positive distance and closest negative distance.
def loss_HardNet(anchor, positive, anchor_swap = False, anchor_ave = False, margin = 1.0, batch_reduce = 'min', loss_type = "triplet_margin"): assert anchor.size() == positive.size(), "Input sizes between positive and negative must be equal." assert anchor.dim() == 2, "Inputd must be a 2D matrix." eps = 1e-8 dist_matrix = distance_matrix_vector(anchor, positive) +eps # D = A_t*P eye = torch.autograd.Variable(torch.eye(dist_matrix.size(1))).cuda() # steps to filter out same patches that occur in distance matrix as negatives pos1 = torch.diag(dist_matrix) dist_without_min_on_diag = dist_matrix+eye*10 # get all the indices which value<0.008 mask = (dist_without_min_on_diag.ge(0.008).float()-1.0)*(-1) mask = mask.type_as(dist_without_min_on_diag)*10 dist_without_min_on_diag = dist_without_min_on_diag+mask # sampling strategy of Hardest in batch if batch_reduce == 'min': # mining the value < 0.008(without mining on the diagonal) min_neg = torch.min(dist_without_min_on_diag, 1)[0] if anchor_swap: min_neg2 = torch.min(dist_without_min_on_diag, 0)[0] min_neg = torch.min(min_neg, min_neg2) """ print for debug dist_matrix_a = distance_matrix_vector(anchor, anchor)+ eps dist_matrix_p = distance_matrix_vector(positive,positive)+eps dist_without_min_on_diag_a = dist_matrix_a+eye*10 dist_without_min_on_diag_p = dist_matrix_p+eye*10 min_neg_a = torch.min(dist_without_min_on_diag_a,1)[0] min_neg_p = torch.t(torch.min(dist_without_min_on_diag_p,0)[0]) min_neg_3 = torch.min(min_neg_p,min_neg_a) min_neg = torch.min(min_neg,min_neg_3) print (min_neg_a) print (min_neg_p) print (min_neg_3) print (min_neg) """ min_neg = min_neg pos = pos1 elif batch_reduce == 'average': # why repeat pos value here? pos = pos1.repeat(anchor.size(0)).view(-1,1).squeeze(0) min_neg = dist_without_min_on_diag.view(-1,1) if anchor_swap: min_neg2 = torch.t(dist_without_min_on_diag).contiguous().view(-1,1) # compare anchor-pos vs. 
pos-anchor value min_neg = torch.min(min_neg, min_neg2) min_neg = min_neg.squeeze(0) elif batch_reduce == 'random': idxs = torch.autograd.Variable(torch.randperm(anchor.size()[0]).long()).cuda() min_neg = dist_without_min_on_diag.gather(1, idxs.view(-1,1))# dim=1, col-idx if anchor_swap: min_neg2 = torch.t(dist_without_min_on_diag).gather(1,idxs.view(-1,1)) min_neg = torch.min(min_neg, min_neg2) min_neg = torch.t(min_neg).squeeze(0) pos = pos1 else: print ('Unknown batch reduce mode. Try min, average or random') sys.exit(1) # calculate the loss depends on the loss_type if loss_type == "triplet_margin": loss = torch.clamp(margin + pos - min_neg, min=0.0) elif loss_type == 'softmax': # Softmin used here: (-x) instead of x as the input # log-likelihood cost function instead of cross-entropy cost function exp_pos = torch.exp(2.0 - pos) exp_den = exp_pos + torch.exp(2.0 - min_neg) + eps loss = - torch.log( exp_pos / exp_den ) elif loss_type == 'contrastive': loss = torch.clamp(margin - min_neg, min=0.0) + pos else: print ('Unknown loss type. Try triplet_margin, softmax or contrastive') sys.exit(1) loss = torch.mean(loss) return loss
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def TripletSemiHardLoss(y_true, y_pred, device, margin=1.0):\n\n labels, embeddings = y_true, y_pred\n\n # Reshape label tensor to [batch_size, 1].\n lshape = labels.shape\n labels = torch.reshape(labels, [lshape[0], 1])\n\n pdist_matrix = pairwise_distance_torch(embeddings, device)\n\n # Build p...
[ "0.6751771", "0.66457057", "0.6491428", "0.6382015", "0.63105524", "0.62775844", "0.627555", "0.6233407", "0.61440635", "0.61312664", "0.61160314", "0.6053419", "0.5984343", "0.5938494", "0.5920994", "0.587004", "0.58658993", "0.58524287", "0.5841395", "0.5838248", "0.5798553...
0.61158264
11
Retrieve an exchange from database by id
def get_exchange(self, id): return self.exch_repo.get(id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getbyid(self, id):\n\n return esd.retrieve(id)", "def get(self, _id):", "def read_item(id: str, request: Request):\n obj = db.get(id, kind=endpoint_model)\n return obj", "def get(cls, id):\n\n return cls.query.get(id)", "def get(cls, id):\n\n return cls.query.get(id)"...
[ "0.67121446", "0.657574", "0.6478656", "0.64241445", "0.64241445", "0.6390948", "0.6356077", "0.63458675", "0.6295548", "0.6249574", "0.6248896", "0.62442577", "0.61988556", "0.61408186", "0.6100281", "0.6074537", "0.6065128", "0.606222", "0.6061581", "0.6003485", "0.5983606"...
0.80811805
0
Retrieve exchanges from database
def get_exchanges(self, search_criteria): return self.exch_repo.getList(search_criteria=search_criteria)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_exchanges(self):\n self.logger.debug(\"Fetching exchanges.\")\n return self._api_query(\"exchanges\")['exchanges']", "def _get_exchanges(token: str) -> List[mtypes.Exchange]:\n _LOG.info(\"Getting exchanges from API ...\")\n response = get_client().service.ExchangeList(Token=token)\n\...
[ "0.7162192", "0.6780117", "0.6251137", "0.62181497", "0.6184577", "0.60228324", "0.5948971", "0.5887415", "0.58758724", "0.5803503", "0.57923263", "0.57165515", "0.5698509", "0.5664619", "0.56348133", "0.5599414", "0.55822587", "0.5546716", "0.55274445", "0.5524719", "0.54852...
0.6667094
2
List all exchanges or one exchange if id option is given
def list_exchanges(self, search_criteria=None): if search_criteria is None: search_criteria = {} exchanges = self.get_exchanges(search_criteria) for exchange in exchanges: self.print_exchange(exchange)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_exchange(self, id):\n return self.exch_repo.get(id)", "def _get_exchanges(token: str) -> List[mtypes.Exchange]:\n _LOG.info(\"Getting exchanges from API ...\")\n response = get_client().service.ExchangeList(Token=token)\n\n exchanges = [\n mtypes.Exchange.from_dict(d=obj)\n ...
[ "0.6209942", "0.564042", "0.54858893", "0.54477894", "0.54447", "0.5443821", "0.54165643", "0.52149", "0.5213143", "0.51882815", "0.5044624", "0.49607393", "0.4930922", "0.4921522", "0.4893471", "0.48433566", "0.48054418", "0.4801602", "0.4760806", "0.47593036", "0.47506058",...
0.55560803
2
Create a new exchange in the database
def create_exchange(self, exchangename, public_key, private_key, user_id, uid = None, pw = None): if not exchangename or not public_key or not private_key: raise Exception("Exchangename, public key and private key must be given") else: return self.exch_repo.create(exchangename, public_key, private_key, user_id, uid, pw)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_exchange(self, exchange_name, exchange_type, internal=None):\n self._channel.exchange_declare(\n exchange=exchange_name,\n durable=True, # Survive reboot\n passive=False, # Perform a declare or just to see if it exists\n internal=internal, # Can only...
[ "0.6696044", "0.6593915", "0.6464137", "0.6299332", "0.6108338", "0.6079601", "0.605643", "0.60084194", "0.5968516", "0.5955321", "0.5955321", "0.5921751", "0.58424836", "0.579992", "0.57557315", "0.57557315", "0.57557315", "0.57540107", "0.569132", "0.56544197", "0.5631779",...
0.7036039
0
Delete an existing exchange from the database
def delete_exchange(self, exchange_id): if exchange_id: self.exch_repo.delete(exchange_id=exchange_id) else: raise Exception("No exchange_id found for deleting exchange.")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_delete_exchange(self):\n new_exchange = self.app.add_exchange(\"test\", \"test\", \"test\")\n ret = self.app.delete_exchange(new_exchange.id)\n self.assertIn(ret[0], \"success\")", "def test_delete_exchange_not_exists(self):\n ret = self.app.delete_exchange(20)\n self....
[ "0.77815914", "0.6905101", "0.6586099", "0.62237895", "0.61273843", "0.6105402", "0.6093326", "0.60743785", "0.607348", "0.6057913", "0.6057913", "0.60360014", "0.60360014", "0.60360014", "0.60360014", "0.60360014", "0.60360014", "0.60360014", "0.60360014", "0.60360014", "0.6...
0.74982965
1
Checks if an input pair is a valid one for given exchange
def is_valid_pair(self, pair, exchange): pairs = self.ccxt.get_pairs(exchange) print(pairs) return pair in pairs
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_pairs(pairs, historical_pairs):\n if pairs is None:\n return False\n for p in pairs:\n if p in historical_pairs:\n return False\n return True", "def is_exchange_information_valid(exchange_info: Dict[str, Any]) -> bool:\n return exchange_info.get(\"status\", None)...
[ "0.6861681", "0.6571933", "0.6249077", "0.6183069", "0.6036506", "0.5871636", "0.5846162", "0.5783997", "0.5700833", "0.56721383", "0.5660363", "0.5637686", "0.5573187", "0.5572298", "0.5558149", "0.5551392", "0.55479705", "0.55122805", "0.5481704", "0.5452177", "0.53962547",...
0.8063874
0
Fetches balance for a pair on an exchange through CCXT
def fetch_balance(self, exchange, pair): return self.ccxt.fetch_balance(exchange, pair)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def fetch_balance(self, params={}):\n await self.load_markets()\n request = {\n 'currency': 'all',\n }\n response = await self.privateGetUserMargin(self.extend(request, params))\n #\n # [\n # {\n # \"account\":1455728,...
[ "0.7030547", "0.68978024", "0.68687195", "0.67780674", "0.6721376", "0.64743924", "0.64189506", "0.6408955", "0.63560534", "0.6347745", "0.63321614", "0.6328665", "0.6324221", "0.6236682", "0.6235676", "0.6205206", "0.61926544", "0.61854434", "0.6158296", "0.6154335", "0.6130...
0.85180926
0
Retrieves the trading fee for a certain pair on a certain exchange
def get_exchange_trading_fee(self, exchange, pair, type): return self.ccxt.get_exchange_trading_fee(exchange, pair, type)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_fee(self, pair, order_type):\n fees = self.p_state._getvalue()['fees']\n if fees:\n\n return float(fees[self._handler[order_type]][pair]['fee'])\n\n else:\n\n return 0.0", "def get_price(self, pair='XBTZAR'):\n data = {'pair': pair}\n query_string ...
[ "0.72088563", "0.6718482", "0.6621101", "0.64627856", "0.6384671", "0.630662", "0.6283862", "0.6246754", "0.62267923", "0.6090204", "0.59220403", "0.59198195", "0.58899343", "0.58853585", "0.58453304", "0.58343554", "0.5809528", "0.5797195", "0.5788672", "0.5771605", "0.57653...
0.8056572
0
Retrieves the market price for a certain pair on a certain exchange for a certain type(maker or taker)
def get_market_price(self, exchange, pair, type): return self.ccxt.get_market_price(exchange, pair, type)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_price(self, pair='XBTZAR'):\n data = {'pair': pair}\n query_string = build_query_string(data)\n\n r = requests.get(build_api_call(self.base_url, None, 'ticker', query_string))\n if r.status_code == 200:\n return r.json()", "def get_price(horizon_host, pair):\n pr...
[ "0.7793075", "0.69196445", "0.67033094", "0.6688754", "0.6671473", "0.66665936", "0.6616699", "0.6557316", "0.6485801", "0.6459023", "0.6424058", "0.6368259", "0.633056", "0.63264066", "0.6295311", "0.62687165", "0.626795", "0.62592244", "0.6171922", "0.6159443", "0.60960245"...
0.8632258
0
Place an order through the ccxt library for a certain exchange, for a certain pair (BTC/USD), type as buy/sell, and amount in currency (if BTC/USD will be BTC)
def place_order(self, exchange, pair, type, amount, price = None): return self.ccxt.place_order(exchange, pair, type, amount, price)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def buy(self, trading_pair: str, amount: Decimal, order_type: OrderType, price: Decimal) -> str:\n return self.place_order(True, trading_pair, amount, price)", "async def create_order(self, symbol: str, type: OrderType, side: OrderSide, amount, price=None, params={}):\n await self.load_markets()\n ...
[ "0.71927655", "0.7107266", "0.7054423", "0.6819442", "0.6688734", "0.66799307", "0.66651535", "0.6650478", "0.6600853", "0.6514789", "0.65064514", "0.64885443", "0.6481737", "0.6471577", "0.6377945", "0.6365583", "0.6359703", "0.63556045", "0.6327038", "0.6295299", "0.6267949...
0.76989853
0
Cancel the order through ccxt library for a certain exchange
def cancel_order(self, exchange, order_id): return self.ccxt.cancel_order(exchange, order_id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_cancel(order):\r\n self.gox.cancel(order.oid)", "def cancelOrder(self, order_number):\n pass", "def _order_cancel(self, bo):\n log.info(\"bo_blotter: order_cancel bracket order bo#%s\" % bo.ticket) \n cancelled = bo.cancel()\n return(cancelled)", "def canc...
[ "0.7706166", "0.744995", "0.7211225", "0.70317745", "0.6936221", "0.6923826", "0.6887567", "0.68759936", "0.6835702", "0.6832168", "0.68217963", "0.6820352", "0.68094903", "0.67458665", "0.6741805", "0.67316127", "0.6710419", "0.67094696", "0.66826385", "0.6623428", "0.65751"...
0.75702757
1
Get the the data for a certain exchange for a given pair for the last 700 hours
def get_history_data(self, exchange, pair, timedelta): return self.ccxt.get_history_data(exchange, pair, timedelta)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_data(pair, other):\n days_ago = 7\n endtime = int(time())\n starttime = endtime - 60 * 60 * 24 * days_ago\n\n geckourl = '%s/markets?vs_currency=%s&ids=%s' % (API, pair[\"currency\"],\n pair[\"coin\"])\n liveprice = requests.get(geckourl).j...
[ "0.6836399", "0.627741", "0.6234868", "0.62244236", "0.61926", "0.6124068", "0.605874", "0.60258263", "0.5984681", "0.5967804", "0.57844293", "0.5726083", "0.57112515", "0.5710812", "0.5707336", "0.5707336", "0.569271", "0.56634426", "0.5589972", "0.5550989", "0.55486476", ...
0.68828434
0
Discovers a footprint object from a DFG
def apply(dfg, parameters=None): if parameters is None: parameters = {} parallel = {(x, y) for (x, y) in dfg if (y, x) in dfg} sequence = {(x, y) for (x, y) in dfg if not (y, x) in dfg} # replace this if needed start_activities = set(utils.dfg_utils.infer_start_activities(dfg)) # replace this if needed end_activities = set(utils.dfg_utils.infer_end_activities(dfg)) activities = set(utils.dfg_utils.get_activities_from_dfg(dfg)) return {Outputs.SEQUENCE.value: sequence, Outputs.PARALLEL.value: parallel, Outputs.START_ACTIVITIES.value: start_activities, Outputs.END_ACTIVITIES.value: end_activities, Outputs.ACTIVITIES.value: activities}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_mgrs_footprint(mgrs_id):\n\n gzd = mgrs_id[:3]\n\n mgrs_100km_gzd = mgrs_id[3:]\n print(gzd)\n print(mgrs_100km_gzd)\n\n file_path = unzip_mgrs_shapefile(gzd)\n\n shapefile_driver = ogr.GetDriverByName(\"ESRI Shapefile\")\n\n grid_ds = shapefile_driver.Open(str(file_path), 0)\n\n la...
[ "0.5391321", "0.5379075", "0.52517396", "0.5143941", "0.51311654", "0.5087027", "0.5060385", "0.5049106", "0.5047882", "0.5021228", "0.5005856", "0.4988447", "0.49569166", "0.4952292", "0.4878632", "0.48658168", "0.47970238", "0.47567862", "0.47346526", "0.47303435", "0.47240...
0.0
-1
Used for loading in model and word2vec files from disk. Returns their objects.
def LoadSavedModels(main_model_path="main_model.pkl", cler_model_path="cler_model.pkl", word2vec_path='GoogleNews-vectors-negative300.bin'): model_main = joblib.load(main_model_path) model_cler = joblib.load(cler_model_path) word2vec = gensim.models.KeyedVectors.load_word2vec_format(word2vec_path, binary=True) return model_main,model_cler,word2vec
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_vectors(path, to_train=False):\n model = Word2Vec.load(path)\n\n if to_train:\n return model\n\n # In case it doesn't need to be trained, delete train code to free up ram\n word_vectors = model.wv\n\n context_vectors = dict()\n if hasattr(model, \"syn1\...
[ "0.69597363", "0.6947155", "0.6811197", "0.6807579", "0.6805793", "0.6797359", "0.6768113", "0.6706945", "0.65690553", "0.6520081", "0.6514057", "0.6461425", "0.6448351", "0.64457756", "0.64116144", "0.64102066", "0.6408863", "0.63833255", "0.63704264", "0.6347624", "0.634755...
0.7026279
0
Predict a label using the main model (clinical, clerical or other). Input is a sentence, along w/ model object and word2vec object. These objects can either be loaded from disk using the LoadSavedModels function, or, if you have just completed training, they can be passed in from the Train.Trainer class. May also adjust the probability threshold above which a clerical decision is made (good for reducing false positives). 0.98 makes for very few false positives!
def PredictLabel(sentence, model_main, word2vec, boundary=0.5): tokenized_sample = word_tokenize(re.sub("-"," ",sentence)) features = np.mean([word2vec.word_vec(w) for w in tokenized_sample if w in word2vec],axis=0) prediction = model_main.predict_proba(features.reshape(1,-1))[0] if model_main.classes_[prediction.argmax()]!="clerical": return model_main.classes_[prediction.argmax()] else: if np.max(prediction)>boundary: return "clerical" else: ranger = range(len(prediction)) del ranger[prediction.argmax()] return model_main.classes_[ranger][prediction[ranger].argmax()]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def PredictClerLabel(sentence, model_cler, word2vec):\n \n tokenized_sample = word_tokenize(re.sub(\"-\",\" \",sentence))\n features = np.mean([word2vec.word_vec(w) for w in tokenized_sample if w in word2vec],axis=0)\n prediction = model_cler.predict_proba(features.reshape(1,-1))[0]\n return model_c...
[ "0.6959012", "0.65132445", "0.65114045", "0.6501456", "0.6449135", "0.6394472", "0.6369494", "0.6363786", "0.63258064", "0.63060385", "0.62783635", "0.62781966", "0.6276569", "0.6266087", "0.62624", "0.61981785", "0.61941963", "0.61721903", "0.6151807", "0.6150158", "0.613657...
0.7438309
0
Predict a label use the clerical model. Input is a sentence, along w/ model object and word2vec object. These objects can either be loaded from disk using the LoadSavedModels function, or, if you have just completed training, they can be passed in from the Train.Trainer class.
def PredictClerLabel(sentence, model_cler, word2vec): tokenized_sample = word_tokenize(re.sub("-"," ",sentence)) features = np.mean([word2vec.word_vec(w) for w in tokenized_sample if w in word2vec],axis=0) prediction = model_cler.predict_proba(features.reshape(1,-1))[0] return model_cler.classes_[prediction.argmax()]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def PredictLabel(sentence, model_main, word2vec, boundary=0.5):\n \n tokenized_sample = word_tokenize(re.sub(\"-\",\" \",sentence))\n features = np.mean([word2vec.word_vec(w) for w in tokenized_sample if w in word2vec],axis=0)\n prediction = model_main.predict_proba(features.reshape(1,-1))[0]\n if m...
[ "0.6949452", "0.6697954", "0.6614793", "0.65763474", "0.6551022", "0.65021735", "0.6459889", "0.6424772", "0.64095986", "0.63946915", "0.6354461", "0.6343086", "0.6327474", "0.62942815", "0.62706435", "0.6258738", "0.6226277", "0.62202674", "0.62170017", "0.62144226", "0.6206...
0.7439432
0
Load IMITS data into a given Spark Context.
def extract_imits(spark_session: SparkSession) -> DataFrame: print(spark_session)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getSparkContext():\n conf = (SparkConf()\n .setMaster(\"local\") # run on local\n .setAppName(\"Logistic Regression\") # Name of App\n .set(\"spark.executor.memory\", \"1g\")) # Set 1 gig of memory\n sc = SparkContext(conf = conf) \n return sc", "def __init__(self, spark_sess...
[ "0.60987294", "0.5617119", "0.5612132", "0.5608199", "0.5594259", "0.5567496", "0.5528342", "0.5485825", "0.5481655", "0.53868705", "0.531229", "0.5279974", "0.52767175", "0.5239063", "0.5228245", "0.517938", "0.51645696", "0.51608706", "0.51583445", "0.51293147", "0.50806874...
0.5426127
9
plots the fenics mesh as it is
def plot_fenics_mesh(mesh, new_fig=True): if(new_fig): plt.figure() plot(mesh) #plt.title("FEniCS mesh") plt.show(block=False) pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_plot_mesh(self):\n plt.close('all')\n\n #\n # Initialize\n #\n fig, ax = plt.subplots(3,3)\n plot = Plot()\n #\n # Define mesh\n # \n mesh = Mesh.newmesh(grid_size=(2,2))\n mesh.refine() \n mesh.root_node().children[1...
[ "0.7348052", "0.7272838", "0.7201923", "0.71683663", "0.6932882", "0.6908609", "0.68675065", "0.6819857", "0.67012626", "0.66765225", "0.66062456", "0.6585177", "0.65265316", "0.6516479", "0.6515228", "0.6511229", "0.6509004", "0.643969", "0.64123374", "0.6407074", "0.6393894...
0.7717216
0
plots the mesh/centroids of mesh as is expected in peridynamics either mesh or cell_cent is to be provided by user neither provinding mesh nor providing cell_cent is wrong
def plot_peridym_mesh(mesh=None, struct_grd=True, cell_cent=None, disp_cent=None, annotate=False): if struct_grd: cell_centroid_function = structured_cell_centroids else: cell_centroid_function = get_cell_centroids if mesh == None and len(np.shape(cell_cent)) == 0 and len(np.shape(disp_cent)) == 0: raise AssertionError("provide either fenics mesh or cell centroid of PD particles") if len(np.shape(cell_cent)) != 0 and len(np.shape(disp_cent))==0: extents = get_domain_bounding_box(cell_cent=cell_cent) if len(np.shape(cell_cent)) == 0 and len(np.shape(disp_cent))!=0: extents = get_domain_bounding_box(cell_cent=disp_cent) if mesh != None and (len(np.shape(cell_cent)) == 0 and len(np.shape(disp_cent)) == 0): extents = get_domain_bounding_box(mesh=mesh) cell_cent = cell_centroid_function(mesh) ## we wish to scale the axis accordign to geometry dim = len(cell_cent[0]) x_min = extents[0][0]; x_max = extents[1][0] y_min = extents[0][1]; y_max = extents[1][1] x=None; y=None; z=None fig = plt.figure() if dim == 3: z_min = corners[0][2]; z_max = corners[1][2] x,y,z = cell_cent.T ax = fig.add_subplot(111, projection='3d') ax.scatter(x,y,z, s=70, marker='o', color='b', alpha=1.0, edgecolors='face') ax.axis('off') if dim == 2 : ax = fig.add_subplot(111) x,y = cell_cent.T plt.scatter(x,y, s=300, color='b', marker='o', alpha=0.6) plt.axis=('off') if annotate==True: for idx, cc in enumerate(cell_cent): plt.text(cc[0], cc[1], str(idx), color='k', verticalalignment='bottom', horizontalalignment='right', fontsize='medium') ax.set_aspect('equal') plt.title("peridynamics mesh") plt.show(block=False)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_plot_mesh(self):\n plt.close('all')\n\n #\n # Initialize\n #\n fig, ax = plt.subplots(3,3)\n plot = Plot()\n #\n # Define mesh\n # \n mesh = Mesh.newmesh(grid_size=(2,2))\n mesh.refine() \n mesh.root_node().children[1...
[ "0.6864378", "0.6407954", "0.6205415", "0.61812276", "0.61798674", "0.60916793", "0.609019", "0.60803694", "0.60406137", "0.59588164", "0.59557277", "0.59142935", "0.58919984", "0.58745694", "0.58541936", "0.5786406", "0.57543874", "0.5720695", "0.5686189", "0.5673838", "0.56...
0.7505498
0
plots the displaced cell centroids after a solution step. Additionally retrns the final cell centroid after additon of displacement field in the orginal configuration
def get_displaced_soln(cell_cent, u_disp, horizon, dim, data_dir=None, plot_=False, save_fig=False, zoom=40): disp_cent = cell_cent + u_disp if plot_ or save_fig: dpi = 2 legend_size = {'size': str(6*dpi)} fig = plt.figure() if dim == 2: ax = fig.add_subplot(111) x, y = cell_cent.T #plt.scatter(x,y, s=300, color='r', marker='o', alpha=0.1, label='original config') x,y = (cell_cent + zoom*u_disp).T plt.scatter(x,y, s=150, color='b', marker='o', alpha=0.6, label=r'$\delta$ = '+ format(horizon, '4.5g')) # plt.legend(prop=legend_size) #plt.xlim(x_min - fact*x_min, x_max + fact*x_max) #plt.ylim(y_min - fact*y_min, y_max + fact*y_max) if dim == 3: #z_min = corners[0][2]; z_max = corners[1][2] from mpl_toolkits.mplot3d import Axes3D x, y, z = cell_cent.T fig = plt.figure() ax = fig.add_subplot(111, projection='3d') ax.scatter(x,y,z, s=150, color='r', marker='o', alpha=0.1, label='original config') x,y,z = (cell_cent + zoom*u_disp) ax.scatter(x,y,z,s=150, color='g', marker='o', alpha=1.0, label='deformed config') ax.axis('off') plt.legend() ax.set_aspect('equal') if plot_: plt.show(block=False) if save_fig: plt.savefig(data_dir) plt.close(fig) return disp_cent
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plotcenterrange():\n plist1 = np.arange(0.02,0.1,0.02)\n plist = np.arange(0.1,1,0.1)\n infectlist = []\n for i in plist1:\n infectlist.append(checkinfectb(0.5,20000,30,200,p = i,q = np.sqrt(2/(20000*math.pi)),startcenter=True)[0])\n for i in plist:\n infectlist.append(checkinfectb...
[ "0.61388534", "0.6034441", "0.58881974", "0.5873742", "0.58667403", "0.58461356", "0.5768717", "0.5750362", "0.56975144", "0.5666557", "0.5653945", "0.56459886", "0.5611346", "0.558924", "0.55840623", "0.5583146", "0.55808353", "0.5573108", "0.5564739", "0.5562984", "0.553712...
0.63715124
0
generates a triangulated Rectangular domain with a circular hole input
def rectangle_mesh(point1=Point(0,0), point2=Point(2,1), numptsX=10, numptsY=5): mesh = RectangleMesh(point1, point2, numptsX, numptsY ) print_mesh_stats(mesh) return mesh
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getRing(ri, ro, size=(25, 25)):\n n, m = size\n xs, ys = np.mgrid[-1:1:n * 1j, -1:1:m * 1j]\n r = np.sqrt(xs ** 2 + ys ** 2)\n\n torus = np.zeros(size, dtype=np.uint8)\n torus[(r < ro) & (r > ri)] = 1\n return torus", "def to_circular(self):\n return quad_hybrid.dot(self.linear)", "def...
[ "0.6168527", "0.5892623", "0.5813818", "0.57707417", "0.5706577", "0.55293894", "0.5516006", "0.5515859", "0.54446626", "0.54344803", "0.5432176", "0.5431236", "0.5403426", "0.54032457", "0.5320207", "0.5309659", "0.52998513", "0.5279193", "0.5279193", "0.5275167", "0.5263225...
0.0
-1
generates a triangulated Rectangular domain with a circular hole input
def rectangle_mesh_with_hole(point1=Point(0,0), point2=Point(3,1), hole_cent=Point(1.5,0.5), hole_rad=0.25, npts=15): Router = mshr.Rectangle(point1, point2) Rinner = mshr.Circle(hole_cent, hole_rad) domain = Router - Rinner mesh = mshr.generate_mesh(domain, npts) print_mesh_stats(mesh) return mesh
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getRing(ri, ro, size=(25, 25)):\n n, m = size\n xs, ys = np.mgrid[-1:1:n * 1j, -1:1:m * 1j]\n r = np.sqrt(xs ** 2 + ys ** 2)\n\n torus = np.zeros(size, dtype=np.uint8)\n torus[(r < ro) & (r > ri)] = 1\n return torus", "def to_circular(self):\n return quad_hybrid.dot(self.linear)", "def...
[ "0.6168527", "0.5892623", "0.5813818", "0.57707417", "0.5706577", "0.55293894", "0.5516006", "0.5515859", "0.54344803", "0.5432176", "0.5431236", "0.5403426", "0.54032457", "0.5320207", "0.5309659", "0.52998513", "0.5279193", "0.5279193", "0.5275167", "0.52632254", "0.5257246...
0.54446626
8
creates a structured cell centroids and cell volumes of square or cubic lattice using the fenics mesh by averaging appropriate number of 2D/3D triangles
def structured_cell_centroids(mesh): dim = mesh.topology().dim() stride = fact(dim) cents = get_cell_centroids(mesh) num_cells = int(mesh.num_cells()/stride) cell_cents_struct = np.zeros((num_cells,dim),dtype=float) for i in range(num_cells): start = int(stride*i) end = int(stride*i)+stride cell_cents_struct[i] = np.average(cents[start:end],axis=0) return cell_cents_struct
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cfdProcessGeometry(self):\r\n \r\n # self.faceCentroids']= [[] for i in range(self.numberOfFaces'])]\r\n # self.faceSf']= [[] for i in range(self.numberOfFaces'])]\r\n # self.faceAreas']= [[] for i in range(self.numberOfFaces'])]\r\n \r\n ## Linear weight of distance from cel...
[ "0.7054485", "0.6528232", "0.64348644", "0.6230865", "0.61347985", "0.61224085", "0.60659236", "0.6059065", "0.59654796", "0.5924792", "0.5886461", "0.587063", "0.58324575", "0.5809455", "0.5807334", "0.5799182", "0.57891464", "0.5780056", "0.5752148", "0.5748409", "0.5745303...
0.68255717
1
returns an array of cell volumes of structured grid created from fenics triangular grid
def structured_cell_volumes(mesh): dim = mesh.topology().dim() stride = fact(dim) vols = get_cell_volumes(mesh) num_cells = int(mesh.num_cells()/stride) return np.ones(num_cells, dtype=float)*stride*vols[0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_array_grid(self):\n print('Making array grid')\n grid_list = []\n or_list = [0, 0, 0]\n far_list = [0, 0, 0]\n\n for root, subdirs, files in os.walk(self.stem):\n for filename in files:\n if self.probe in filename and self.prot_name in filename a...
[ "0.6504157", "0.64335394", "0.6400999", "0.6392453", "0.63127804", "0.6261576", "0.62555546", "0.62192905", "0.6213598", "0.61927265", "0.61504424", "0.6096406", "0.6015397", "0.5978754", "0.5973437", "0.59621096", "0.59490955", "0.5942069", "0.59264094", "0.58781314", "0.587...
0.61203617
11
generates a 3D box mesh with tetrahedral elements with a cylindrical hole in it input
def box_mesh(point1=Point(0,0,0), point2=Point(2,1,1), numptsX=8, numptsY=4, numptsZ=4): mesh = BoxMesh(point1, point2, numptsX, numptsY, numptsZ) print_mesh_stats(mesh) return mesh
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def box_mesh_with_hole(point1=Point(0,0,0), point2=Point(2,1,1), cyl_cent1 = Point(1, -10, 0.5), \n cyl_cent2= Point(1, 10, 0.5), cyl_rad=0.25, numpts=15):\n Router = mshr.Box(point1, point2)\n Rinner = mshr.Cylinder(cyl_cent1, cyl_cent2, cyl_rad, cyl_rad)\n domain = Router - Rinner\n...
[ "0.6957441", "0.63803893", "0.62517613", "0.62113863", "0.6197783", "0.6191623", "0.6189739", "0.6144739", "0.6058943", "0.6045965", "0.60435003", "0.59895325", "0.5982015", "0.592962", "0.59281814", "0.59142035", "0.5913737", "0.5881925", "0.5879472", "0.5861976", "0.5861803...
0.6045953
10
generates a 3D box mesh with tetrahedral elements with a cylindrical hole in it input
def box_mesh_with_hole(point1=Point(0,0,0), point2=Point(2,1,1), cyl_cent1 = Point(1, -10, 0.5), cyl_cent2= Point(1, 10, 0.5), cyl_rad=0.25, numpts=15): Router = mshr.Box(point1, point2) Rinner = mshr.Cylinder(cyl_cent1, cyl_cent2, cyl_rad, cyl_rad) domain = Router - Rinner mesh = mshr.generate_mesh(domain, numpts) print_mesh_stats(mesh) return mesh
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_lattice(box):\n from quippy.atoms import make_lattice\n if box.shape == (3, 3):\n\t# http://lammps.sandia.gov/doc/Section_howto.html#howto-12 Describes the\n\t# methodology (look for the section entitled \"6.12. Triclinic\n\t# (non-orthogonal) simulation boxes\") The [a, b, c, alpha, beta, gamma]\n\...
[ "0.63809776", "0.62516385", "0.6212014", "0.61972636", "0.6192103", "0.6190166", "0.6146428", "0.60585725", "0.604773", "0.6045741", "0.6044307", "0.5990217", "0.59825504", "0.59320945", "0.59280205", "0.5914633", "0.59137803", "0.5881047", "0.5878436", "0.58624035", "0.58605...
0.6957271
0
returns the cell centroids lying within given geometric extents
def get_cell_centroid2(cents, extents): cells_in_ee = np.empty(0,int) for i in range(len(cents)): c = cents[i] if( (c > extents[0]).all() and (c <= extents[1]).all() ): cells_in_ee = np.append(cells_in_ee, [i], axis=0) return cells_in_ee
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_centroid_cell(self):\n\n x_min, y_min = self.find_min()\n x_max, y_max = self.find_max()\n x_centroid = int((x_max+x_min)/2)\n y_centroid = int((y_max+y_min)/2)\n centroide = x_centroid, y_centroid\n return centroide", "def cells_centroid_py(self):\n A=se...
[ "0.6934343", "0.688382", "0.672464", "0.66895455", "0.6638991", "0.65940636", "0.65940636", "0.6519114", "0.6447519", "0.63558304", "0.6343967", "0.62743145", "0.6219808", "0.6177495", "0.6171115", "0.6169734", "0.6126709", "0.6084704", "0.60784847", "0.60754013", "0.6071603"...
0.79571277
0
given a fenics mesh/mshr.mesh as argument, returns the centroid of the cells in the mesh input
def get_cell_centroids(mesh): num_els = mesh.num_cells() coords = mesh.coordinates() cells = mesh.cells() dim = len(coords[0]) cell_cent = np.zeros((num_els, dim), dtype=float, order='c') for i in range(num_els): pts = [coords[idx] for idx in cells[i]] cell_cent[i] = (1/(dim+1))*sum(pts) #this works only for 2D/3D triangles return cell_cent
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def structured_cell_centroids(mesh):\n dim = mesh.topology().dim()\n stride = fact(dim)\n cents = get_cell_centroids(mesh)\n num_cells = int(mesh.num_cells()/stride)\n cell_cents_struct = np.zeros((num_cells,dim),dtype=float)\n\n for i in range(num_cells):\n start = int(stride*i)\n ...
[ "0.7213677", "0.68678993", "0.68678993", "0.68211424", "0.67176604", "0.6685038", "0.6652162", "0.6585276", "0.65823853", "0.6560574", "0.65416116", "0.65348834", "0.64790154", "0.64459264", "0.6443465", "0.6432527", "0.64168453", "0.63676035", "0.63556105", "0.6349497", "0.6...
0.7951323
0
given a fenics/mshr mesh as argument, this function returns the area of each cell in the mesh input
def get_cell_volumes(mesh): num_els = mesh.num_cells() coords = mesh.coordinates() cells = mesh.cells() dim = len(coords[0]) cell_volume = np.zeros(num_els, dtype=float) div_fact = 1.0/float(fact(dim)) #division factor for n-dim tetrahderon for i in range(num_els): cell_volume[i] = abs(la.det(np.insert(coords[cells[i]], dim, 1, axis=1))) return div_fact*cell_volume
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compute_mesh_area(mesh):\n vertices = mesh.vertices\n faces = mesh.faces\n areas = [compute_triangle_area(vertices[face]) for face in faces]\n mesh_surface_area = sum(areas)\n return mesh_surface_area", "def compute_mesh_area_numpy(mesh):\n pass", "def compute_mesh_area_smart(mesh):\n ...
[ "0.7731537", "0.7606668", "0.7485298", "0.67880005", "0.6668737", "0.6619852", "0.65239465", "0.65232146", "0.6518488", "0.6516006", "0.6474902", "0.6462257", "0.6318234", "0.63121057", "0.62883294", "0.62868917", "0.6269633", "0.62583816", "0.6257095", "0.6177899", "0.617129...
0.569701
56
given a fenics mesh, this function returns the bounding_box that fits around the domain
def get_domain_bounding_box(mesh=None, cell_cent=None): def local_bbox_method(coords): dim = len(coords[0]) corner_min = np.zeros(dim ,float) corner_max = np.zeros(dim, float) for d in range(dim): corner_min[d] = min(coords[:,d]) corner_max[d] = max(coords[:,d]) return np.vstack((corner_min, corner_max)) if mesh==None and len(np.shape(cell_cent)) == 0: raise AssertionError("provide either fenics mesh or cell centroid of PD particles") if mesh != None and len(np.shape(cell_cent)) == 0: coords = mesh.coordinates() return local_bbox_method(coords) if cell_cent.all() and not mesh: coords = cell_cent return local_bbox_method(coords)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bounds(self):\n return self._bboxes[0][0] #TODO: merge all coverages", "def get_bounding_box(self):\n return self._domain.get_bounding_box()", "def get_boundingbox(face, width, height, scale=1.3, minsize=None):\n x1 = face.left()\n y1 = face.top()\n x2 = face.right()\n y2 = face.bottom...
[ "0.68726236", "0.6643377", "0.66258126", "0.6624291", "0.66229665", "0.6602934", "0.659407", "0.6578977", "0.6496308", "0.6475542", "0.63955826", "0.6387074", "0.6363856", "0.63531804", "0.63405913", "0.6319945", "0.6304606", "0.62939525", "0.6264529", "0.62618446", "0.625956...
0.7623161
0
returns lists of elements and centroids of corresponding elements that lie in the peridynamic boundary
def get_peridym_mesh_bounds(mesh, struct_grd=False): if(struct_grd): cell_cent = structured_cell_centroids(mesh) max_edge_len = np.diff(cell_cent[0:2][:,0]) range_fact = 2.001*max_edge_len else: cell_cent = get_cell_centroids(mesh) max_edge_len = mesh.hmax() range_fact = 1.5001*max_edge_len dim = len(cell_cent[0]) corner_min, corner_max = get_domain_bounding_box(mesh) num_els = len(cell_cent) bound_range = np.zeros(2*dim, dtype=float) bound_nodes = {} #dict to store the node numbers of centroids that lie within bound_range bound_cents = {} #dict to store the node centroids corresponding to node numbers above for d in range(dim): """ index to direction along which the normal to boundary occurs:# 0 - x_min 1 - x_max 2 - y_min 3 : y_max 4 : z_min 5 : z_max Note: z-normal not applicable to 2d problems """ bound_range[2*d] = corner_min[d] + range_fact #min bound for d bound_range[2*d +1] = corner_max[d] - range_fact #max bound for d bound_nodes[(2*d)] = np.where(cell_cent[:,d] <= bound_range[2*d]) #node nums for min bound bound_nodes[(2*d+1)] = np.where(cell_cent[:,d] >= bound_range[2*d+1]) # node nums for max bound bound_cents[(2*d)] = cell_cent[bound_nodes[2*d][0]] #node centroids for min bound bound_cents[(2*d+1)] = cell_cent[bound_nodes[2*d+1][0]] #node centroids for min bound return bound_nodes, bound_cents #convert list to np array
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_element_centroids(self):\n if self.centroids is None:\n self.centroids = np.vstack((\n np.mean(self.grid['x'], axis=1),\n np.mean(self.grid['z'], axis=1)\n )).T\n\n return self.centroids", "def compute_barycenters(self):\n barycente...
[ "0.70245355", "0.7002935", "0.6660773", "0.62237847", "0.6222728", "0.6192791", "0.61816937", "0.6137272", "0.6075676", "0.599824", "0.5997937", "0.59680784", "0.59588766", "0.5945071", "0.592016", "0.58423734", "0.5838941", "0.5807597", "0.5790801", "0.5777944", "0.576687", ...
0.5779456
19
given a set of cell centroid beloning to regular (Square/Tri) discretization in 2D/3D, the method returns the edge length
def get_peridym_edge_length(cell_cent, struct_grd=False): dim = len(cell_cent[0]) el = np.zeros(dim, dtype = float) if(struct_grd): el_fact = 1.0 else: el_fact = 3.0 for d in range(dim): xx = np.unique(cell_cent[:,d]) el[d] = el_fact*np.max(np.abs(np.diff(xx[0:2]))) return el
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cell_centroids_original(crd, con):\n \n nele = con.shape[0]\n dim = crd.shape[1]\n centroid_xy = np.zeros((nele, dim))\n for i in range(len(con)):\n el_crds = crd[con[i, :], :] # (4, 2)\n centroid_xy[i, :] = (el_crds).mean(axis=0)\n return centroid_xy", "def cell_edges(self):", ...
[ "0.6159054", "0.610381", "0.6095435", "0.6027462", "0.6002276", "0.59965074", "0.5987988", "0.58697575", "0.58489704", "0.5763668", "0.574332", "0.5738635", "0.57146585", "0.5661359", "0.5644903", "0.56421226", "0.5631822", "0.5622291", "0.55991775", "0.55730665", "0.5567386"...
0.6498209
0
after adding ghost layers, the boundary layers are modified and we need the modified BL's to do further pre and postprocessing
def get_modified_boundary_layers(cell_cent, el, num_lyrs, struct_grd): dim = len(el) bound_range = np.zeros(2*dim, dtype=float) bound_nodes = {} #dict to store the node numbers of centroids that lie within bound_range bound_cents = {} #dict to store the node centroids corresponding to node numbers above if(struct_grd): factor = 1 correction = 0 else: factor = 2 correction = 1 lyrs = float(num_lyrs-1)+ 0.001 for d in range(dim): bound_range[2*d] = factor*np.min(cell_cent[:,d]) + lyrs*el[d] bound_range[2*d+1] = np.max(cell_cent[:,d]) -lyrs*el[d] - el[d]/3*correction bound_nodes[2*d] = np.where(cell_cent[:,d] <= bound_range[2*d]) bound_nodes[(2*d+1)] = np.where(cell_cent[:,d] >= bound_range[2*d+1]) bound_cents[2*d] = cell_cent[bound_nodes[2*d][0]] bound_cents[2*d+1] = cell_cent[bound_nodes[2*d+1][0]] return bound_nodes, bound_cents
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _init_layers(self):\n cls_branch = []\n for _ in range(self.num_reg_fcs):\n cls_branch.append(Linear(self.embed_dims, self.embed_dims))\n cls_branch.append(nn.LayerNorm(self.embed_dims))\n cls_branch.append(nn.ReLU(inplace=True))\n cls_branch.append(Linear(...
[ "0.61987007", "0.60086524", "0.5981845", "0.5945961", "0.5919441", "0.5910352", "0.5873379", "0.58416927", "0.584018", "0.58067274", "0.5799793", "0.57604694", "0.57457453", "0.57456446", "0.5722169", "0.56993973", "0.56873125", "0.5686807", "0.5664177", "0.56639755", "0.5658...
0.0
-1
computes the extents of the new mesh after the addition of ghost layers of centroids
def compute_modified_extents(cell_cent, el, struct_grd=False): dim = len(cell_cent[0]) extents = np.zeros((2, dim), float) min_corners = np.zeros(dim, float) max_corners = np.zeros(dim, float) if(struct_grd): shift_fact = 0.5 else: shift_fact = 1.0/3.0 for d in range(dim): min_corners[d] = np.min(cell_cent[:,d]) max_corners[d] = np.max(cell_cent[:,d]) """ below is done to avoid round-off error due to substraction of two numbers near to each other This occurs when corners in one of the dimension remains unchanged but we still try to compute the new extents """ extents[0][d] = round(min_corners[d] - shift_fact*el[d], 16) extents[1][d] = round(max_corners[d] + shift_fact*el[d], 16) return extents
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cfdProcessGeometry(self):\r\n \r\n # self.faceCentroids']= [[] for i in range(self.numberOfFaces'])]\r\n # self.faceSf']= [[] for i in range(self.numberOfFaces'])]\r\n # self.faceAreas']= [[] for i in range(self.numberOfFaces'])]\r\n \r\n ## Linear weight of distance from cel...
[ "0.6003565", "0.58814555", "0.58328587", "0.56252813", "0.54995984", "0.5355394", "0.5334341", "0.52900535", "0.5283956", "0.5225239", "0.5213403", "0.52073336", "0.52039486", "0.52000946", "0.5199819", "0.5195994", "0.5187157", "0.5172253", "0.5169794", "0.5148453", "0.51294...
0.56990963
3
writes the peridynamic mesh coordinates to a vetk supported file format
def write_to_vtk(mesh, displacement=None, file_name="gridfile"): cents = get_cell_centroids(mesh) dim = len(cents[0]) if displacement is not None: cents+= displacement file_name = "./"+file_name write_function=None if dim==3: write_function = write_to_vtk3D if dim==2: write_function = write_to_vtk2D write_function(cents, displacement, file_name) pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def writeMeshVTP(self, outFile):\n # setup colors\n Colors = vtk.vtkFloatArray()\n #Colors.SetNumberOfComponents(3)\n Colors.SetNumberOfTuples(self.Npts)\n Colors.SetName(self.label) #can change to any string\n\n #points\n vtkPts = vtk.vtkPoints()\n\n #build ...
[ "0.7374671", "0.7342227", "0.72970176", "0.7035716", "0.6870228", "0.6849702", "0.67708486", "0.66564435", "0.6611505", "0.6559381", "0.6552022", "0.65475076", "0.645451", "0.64042014", "0.6353414", "0.6322365", "0.6314053", "0.62772197", "0.626675", "0.6251661", "0.6200027",...
0.6905679
4
writes 3D data to vtk file
def write_to_vtk3D(cents, displacement, file_name): x,y,z = cents.T x = np.array(x, order='c') y = np.array(y, order='c') z = np.array(z, order='c') if displacement is None: pointsToVTK(file_name, x, y, z, data={"x":x, "y":y, "z":z}) else: dispX, dispY, dispZ = displacement.T dispX = np.array(dispX, order='c') dispY = np.array(dispY, order='c') dispZ = np.array(dispZ, order='c') pointsToVTK(file_name, x, y, z, data={"x":x, "y":y, "z":z, "dispX":dispX, "dispY":dispY, "dispZ":dispZ}) pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def write_to_vtk(mesh, displacement=None, file_name=\"gridfile\"):\n cents = get_cell_centroids(mesh)\n dim = len(cents[0])\n \n if displacement is not None:\n cents+= displacement\n \n file_name = \"./\"+file_name\n \n write_function=None\n if dim==3:\n write_function = w...
[ "0.72473127", "0.7167763", "0.712707", "0.70639265", "0.70125705", "0.6693387", "0.66590923", "0.6534829", "0.6533069", "0.6519555", "0.6518537", "0.6369265", "0.6330161", "0.62977713", "0.62542105", "0.6210785", "0.61916816", "0.61361706", "0.6113361", "0.61057776", "0.60783...
0.66667515
6
writes 2D data to VTK
def write_to_vtk2D(cents, displacement, file_name): x,y = cents.T x = np.copy(x, order='c') y = np.copy(y, order='c') z = np.zeros(len(x), order='c') if displacement is None: pointsToVTK(file_name, x, y, z, data={"x":x, "y":y}) else: dispX, dispY = displacement.T dispX = np.array(dispX, order='c') dispY = np.array(dispY, order='c') pointsToVTK(file_name, x, y, z, data={"x":x, "y":y, "dispX":dispX, "dispY":dispY}) pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def saveVelocityAndPressureVTK_binary(pressure,u,v,w,x,y,z,filename,dims):\n numEl_size = u.size; numEl = np.prod(numEl_size);\n # open the file and write the ASCII header:\n file = open(filename,'w')\n file.write('# vtk DataFile Version 3.0\\n')\n file.write('VTK file for data post-processed with P...
[ "0.70123774", "0.68971926", "0.6655694", "0.6629076", "0.64621574", "0.6329957", "0.6204803", "0.6192847", "0.60517424", "0.599518", "0.59665126", "0.5899244", "0.58768153", "0.5847911", "0.5827327", "0.5739005", "0.57155335", "0.57155335", "0.57111055", "0.5698415", "0.56801...
0.62436384
6
creates n distint colors for plts, where n = num_colors
def get_colors(num_colors): import colorsys colors = [] for i in np.arange(0., 360., 360. / num_colors): hue = i/360. lightness = (50 + np.random.rand() * 10)/100. saturation = (90 + np.random.rand() * 10)/100. colors.append(colorsys.hls_to_rgb(hue, lightness, saturation)) return colors
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_funky_cmap(n_colors):\n\n colors = []\n for i in np.arange(0., 360., 360. / n_colors):\n h = i / 360.\n l = (50 + np.random.rand() * 10) / 100.\n s = (90 + np.random.rand() * 10) / 100.\n colors.append(hls_to_rgb(h, l, s))\n\n return colors", "def generate_n_colors...
[ "0.749464", "0.74524117", "0.7178915", "0.71482074", "0.7119622", "0.7088267", "0.6929152", "0.6905464", "0.68315506", "0.6754532", "0.672276", "0.6686234", "0.6637927", "0.66152275", "0.6572149", "0.6569774", "0.6569445", "0.65186614", "0.650056", "0.6491387", "0.6475267", ...
0.66890764
11
returns list of markers to be used for plt functions; max markers allowed = 18
def get_markers(num_markers): markers = ['^','o','P','X','*', 'd','<', '>', ',','|', '1','2','3','4','s','p','*','h','+'] if(num_markers>18): sys.exit("cannot create more than 18 markers, refactor your code; force exiting") return markers[0:num_markers]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_markerstyles(n=None):\n all_markers = ['o', 'D', 's', '2', '*', 'h', '8', 'v', 'x', '+', 5, 'd', '>', 7, '.', '1', 'p', '3',\n 6, 0, 1, 2, 3, 4, '4', '<', 'H', '^']\n # Note: 0: 'tickleft', 1: 'tickright', 2: 'tickup', 3: 'tickdown', 4: 'caretleft', 'D': 'diamond', 6: 'caretup',\n ...
[ "0.65396273", "0.6017809", "0.5980015", "0.5968961", "0.5907878", "0.5887821", "0.5838874", "0.5838874", "0.5829966", "0.58082724", "0.5748896", "0.5732162", "0.5673139", "0.5570676", "0.5556668", "0.5437964", "0.5424193", "0.5405786", "0.53380895", "0.53265303", "0.5313803",...
0.74688494
0
Basic attach/detach IPv6 test with single UE
def test_attach_detach_ipv6(self): num_ues = 2 detach_type = [ s1ap_types.ueDetachType_t.UE_NORMAL_DETACH.value, s1ap_types.ueDetachType_t.UE_SWITCHOFF_DETACH.value, ] wait_for_s1 = [True, False] self._s1ap_wrapper.configUEDevice(num_ues) # Default apn over-write magma_default_apn = { "apn_name": "magma.ipv4", # APN-name "qci": 9, # qci "priority": 15, # priority "pre_cap": 1, # preemption-capability "pre_vul": 0, # preemption-vulnerability "mbr_ul": 200000000, # MBR UL "mbr_dl": 100000000, # MBR DL "pdn_type": 2, # PDN Type 0-IPv4,1-IPv6,2-IPv4v6 } apn_list = [magma_default_apn] for i in range(num_ues): req = self._s1ap_wrapper.ue_req print( "************************* Running End to End attach for ", "UE id ", req.ue_id, ) self._s1ap_wrapper.configAPN( "IMSI" + "".join([str(j) for j in req.imsi]), apn_list, default=False, ) # Now actually complete the attach self._s1ap_wrapper._s1_util.attach( req.ue_id, s1ap_types.tfwCmd.UE_END_TO_END_ATTACH_REQUEST, s1ap_types.tfwCmd.UE_ATTACH_ACCEPT_IND, s1ap_types.ueAttachAccept_t, pdn_type=2, ) # Wait on EMM Information from MME self._s1ap_wrapper._s1_util.receive_emm_info() print( "************************* Running UE detach for UE id ", req.ue_id, ) # Now detach the UE self._s1ap_wrapper.s1_util.detach( req.ue_id, detach_type[i], wait_for_s1[i], )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_mgre6(self):\n\n self.pg0.config_ip6()\n self.pg0.resolve_ndp()\n\n e = VppEnum.vl_api_tunnel_encap_decap_flags_t\n\n for itf in self.pg_interfaces[3:]:\n #\n # one underlay nh for each overlay/tunnel peer\n #\n itf.config_ip6()\n ...
[ "0.6983675", "0.6860099", "0.6674686", "0.6540398", "0.6405422", "0.63681173", "0.6198082", "0.61734575", "0.6095474", "0.6050625", "0.602617", "0.6024223", "0.59982115", "0.59627396", "0.59101856", "0.58795923", "0.5868089", "0.58631897", "0.5853766", "0.5821142", "0.5754354...
0.83120114
0
Create a Pandas DataFrame out of a .yaml file. Examples
def yaml_to_pandas(filename: str) -> Tuple[pd.DataFrame, Optional[str]]: # Read the yaml file with open(filename, 'r') as f: dict_ = yaml.load(f, Loader=yaml.SafeLoader) project = dict_.pop("__project__", None) # Convert the yaml dictionary into a dataframe data: Dict[str, Dict[Tuple[Hashable, Hashable], Any]] = {} for k1, v1 in dict_.items(): for k2, v2 in v1['users'].items(): data[k2] = {('info', k): v for k, v in v1.items() if k != 'users'} data[k2][NAME] = v2 data[k2][PROJECT] = k1 df = pd.DataFrame(data).T # Fortmat, sort and return the dataframe df.index.name = 'username' df[SBU_REQUESTED] = df[SBU_REQUESTED].astype(float) df[TMP] = df.index df.sort_values(by=[PROJECT, TMP], inplace=True) df.sort_index(axis=1, inplace=True, ascending=False) del df[TMP] df[ACTIVE] = False validate_usernames(df) return df, project
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_to_dataframe(\n filename: Union[Path, str],\n marker: str = \"---\",\n csv_options: Optional[Dict[str, Any]] = None,\n yaml_options: Optional[Dict[str, Any]] = None,\n) -> Tuple[DataFrame, Dict[str, Any]]:\n if DataFrame is None:\n raise ModuleNotFoundError(\n \"Module pan...
[ "0.6223266", "0.6129303", "0.6100554", "0.60840815", "0.6082889", "0.5902778", "0.5898788", "0.5859711", "0.5854862", "0.5829673", "0.5819154", "0.58064884", "0.58064884", "0.5772481", "0.5768575", "0.57589835", "0.5717933", "0.5713953", "0.5706922", "0.5706922", "0.5701406",...
0.6970648
0
Validate that all users belonging to an account are available in the .yaml input file. Raises a KeyError If one or more usernames printed by the ``accinfo`` comand are absent from df.
def validate_usernames(df: pd.DataFrame) -> None: _usage = check_output(['accinfo'], encoding='utf8') iterator = filter(None, _usage.splitlines()) for i in iterator: if i == "# Users linked to this account": usage = np.array(list(iterator), dtype=np.str_) break else: raise ValueError("Failed to parse the passed .yaml file") bool_ar1 = np.isin(usage, df.index) bool_ar2 = np.isin(df.index, usage) name_diff = "" name_diff += "".join(f"\n- {name}" for name in usage[~bool_ar1]) name_diff += "".join(f"\n+ {name}" for name in df.index[~bool_ar2].values) if name_diff: raise ValueError(f"User mismatch between .yaml file and `accinfo` output:{name_diff}")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def valid_user_data(user_data):\n return 'account_ids' in user_data and 'monthly_expenses' in user_data", "def load_users():\n try:\n # json file should be in the same file location as the function\n base_dir = os.path.dirname(__file__)\n abs_file = os.path.join(base_dir, 'users.json')...
[ "0.5713202", "0.5615229", "0.56124175", "0.556478", "0.5467763", "0.5272703", "0.5202546", "0.52003455", "0.5149381", "0.5146254", "0.51337475", "0.5124031", "0.509678", "0.5084452", "0.5061947", "0.5024947", "0.5024677", "0.49984267", "0.49967858", "0.4986641", "0.49853197",...
0.76862663
0
This is our handler for the menu item. Our inItemRef is the refcon we registered in our XPLMAppendMenuItem calls. It is either +1000 or 1000 depending on which menu item is picked.
def MyMenuHandlerCallback(self, inMenuRef, inItemRef): if (self.DataRef != 0): """ We read the data ref, add the increment and set it again. This changes the nav frequency. """ XPLMSetDatai(self.DataRef, XPLMGetDatai(self.DataRef) + inItemRef) pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def add_to_menu ( self, menu_item ):\r\n pass", "def on_menu_item(self, e):\n if e.Id == ids.RESTORE:\n wx.PostEvent(self.app.roster, ev.ShowRoster())\n elif e.Id == ids.HIDE:\n wx.PostEvent(self.app.roster, ev.HideRoster())\n elif e.Id == ids.EXIT:\n ...
[ "0.61091536", "0.57265496", "0.55302376", "0.5491266", "0.5436538", "0.5421155", "0.54151934", "0.54015744", "0.53766143", "0.5230789", "0.5225447", "0.522308", "0.5221148", "0.5216817", "0.5182499", "0.5182356", "0.51636726", "0.5154699", "0.51006156", "0.5060227", "0.505950...
0.7293928
0
Add constant to elf and return corresponding index If constant already existed, this method won't make a new copy
def add_constant(self, type_: str, value): assert type_ in [Constant.STR, Constant.INT, Constant.DOUBLE], 'Error constant type' for idx, const in enumerate(self.constants): if (const.type_, const.value) == (type_, value): return idx self.constants.append(Constant(type_, value)) return len(self.constants) - 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _get_const(self, constant):\n for i in range(len(self.consts)):\n if type(self.consts[i]) == type(constant) and self.consts[i] == constant:\n return i\n\n self.consts.append(constant)\n return len(self.consts) - 1", "def add_constant(value, name):\n\n # TODO: inefficient linear search...
[ "0.6549235", "0.5801646", "0.564684", "0.5430631", "0.54223347", "0.52493185", "0.517787", "0.5129371", "0.50737894", "0.5043636", "0.50140244", "0.49892992", "0.4976119", "0.49711967", "0.4970512", "0.49640742", "0.4963301", "0.49513727", "0.49467608", "0.49171853", "0.49073...
0.57320184
2
Returns a string representation of a template action.
def tmpl_to_str(self, template_idx, o1_id, o2_id): template_str = self.template_generator.templates[template_idx] holes = template_str.count('OBJ') assert holes <= 2 if holes <= 0: return template_str elif holes == 1: return template_str.replace('OBJ', self.vocab_act[o1_id]) else: return template_str.replace('OBJ', self.vocab_act[o1_id], 1)\ .replace('OBJ', self.vocab_act[o2_id], 1)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def action_to_str(action):\n raise NotImplementedError", "def action_to_pretty_str(action) :\n raise NotImplementedError", "def actionString(self,action):\n return str(self._mdp.A[action])", "def __str__(self):\n return _action_args_dict[self.action].name", "def as_action(self) ...
[ "0.7366068", "0.73624396", "0.7298899", "0.7282407", "0.72067684", "0.7068115", "0.6933396", "0.6931171", "0.6851052", "0.68281645", "0.6725535", "0.6540052", "0.6448798", "0.6448798", "0.6302694", "0.62781346", "0.62517726", "0.62420046", "0.62164974", "0.62151736", "0.61607...
0.5388328
68
Returns the computational basis samples generated for all wires. In the _qubit_device.py, the function calls for analytic_probability for its operations.
def generate_samples(self): self.analytic_probability()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_all_samples(self):\n\n n_samples, n_dimensions, V = self.n_samples, self.n_dimensions, self.V\n sample_all = np.zeros([n_samples, n_dimensions])\n\n X = int(0)\n for j in range(1, n_samples):\n X ^= V[self.index_of_least_significant_zero_bit(j - 1)]\n ...
[ "0.6354442", "0.6312051", "0.6045636", "0.5982405", "0.5920717", "0.58340013", "0.57912403", "0.5770052", "0.5703894", "0.5652854", "0.5625554", "0.5597974", "0.5590781", "0.5573974", "0.5573084", "0.5562036", "0.55604845", "0.55475634", "0.55385345", "0.552718", "0.552718", ...
0.5722098
8
Statistics of operation calls
def operation_calls(self): return self._operation_calls
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def callstats(): # real signature unknown; restored from __doc__\n return ()", "def compute_statistics(self):", "def op_callcount(self):\r\n # timing is stored by node, we compute timing by Op on demand\r\n rval = {}\r\n for node, count in self.apply_callcount.items():\r\n rv...
[ "0.7313468", "0.7228227", "0.7212459", "0.7169933", "0.6866486", "0.6595815", "0.6593144", "0.6504629", "0.64714056", "0.6452719", "0.6406453", "0.6396562", "0.6314827", "0.6296872", "0.62631065", "0.6217563", "0.6191796", "0.61761403", "0.6125694", "0.61125034", "0.6077134",...
0.6650646
5
load data Divide the train set into train set and crossvalidation set in ratio.
def load_data(trainfile, testfile): raw_train = pd.read_csv(trainfile, header=None) raw_test = pd.read_csv(testfile, header=None) train = raw_train.values test = raw_test.values train_features = train[0::, 1::] train_label = train[::, 0] test_features = test[0::, 1::] test_label = test[::, 0] train, cv , train_label, cv_label = train_test_split(train_features,train_label, test_size=0.33, random_state=42) return train, train_label, \ cv, cv_label, \ test_features, test_label
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def train_test_loaders(dataset, validation_ratio=0.2, **kwargs):\n dataset_size = len(dataset)\n test_size = int(np.floor(validation_ratio * dataset_size))\n train_size = dataset_size - test_size\n print('TRAIN SIZE {}'.format(train_size))\n print('TEST SIZE {}'.format(test_size))\n train_dataset...
[ "0.7038124", "0.68708676", "0.6773931", "0.66979957", "0.6655997", "0.66537637", "0.6629245", "0.6622395", "0.6614368", "0.6608594", "0.6596205", "0.65692824", "0.6518721", "0.6510277", "0.6498947", "0.6481965", "0.64735436", "0.64697593", "0.64670414", "0.64647627", "0.64571...
0.67273855
3
gR(D,A) = (H(D) H(D|A)) / H(D|A)
def _cal_igr(x, y): return (_cal_entropy(y) - _cal_conditionalEnt(x, y)) / _cal_conditionalEnt(x, y)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def g(self, RD):\n g = 1 / np.sqrt((1 + 3 * np.power(self.q, 2)) / np.power(np.pi, 2)) \n \n return g", "def g(r):\n return np.cos(r) / (np.exp(0.4 * (r - 8)) + 1)", "def r0(self):\n return self.p[0] / self.p[1]", "def get_rm(g):\n return 1 / g", "def calculate_gear_ra...
[ "0.65369403", "0.6507665", "0.6253428", "0.6249482", "0.6246978", "0.6144162", "0.6086388", "0.608423", "0.6083159", "0.60784787", "0.60262895", "0.5952174", "0.5915985", "0.5905272", "0.5902779", "0.5874937", "0.5870388", "0.58570063", "0.5824216", "0.582396", "0.58188695", ...
0.5556284
42
Build Data Docs for a project.
def docs_build(directory, site_name, view=True, assume_yes=False): context = toolkit.load_data_context_with_error_handling(directory) build_docs(context, site_name=site_name, view=view, assume_yes=assume_yes) toolkit.send_usage_message( data_context=context, event="cli.docs.build", success=True )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_documentation(self):\n self.generate_api_docs()\n build.main([\n self.SOURCE_DIR,\n self.BUILD_DIR,\n ])", "def build_docs(context, site_name=None, view=True, assume_yes=False):\n logger.debug(\"Starting cli.datasource.build_docs\")\n\n if site_name i...
[ "0.6422397", "0.64200485", "0.63599217", "0.63230777", "0.6303222", "0.6295382", "0.61533386", "0.6144586", "0.60924774", "0.60924774", "0.60924774", "0.60327756", "0.6032378", "0.5982227", "0.59735847", "0.59703684", "0.59529877", "0.59529877", "0.5898977", "0.58841336", "0....
0.58279645
24
List known Data Docs Sites.
def docs_list(directory): context = toolkit.load_data_context_with_error_handling(directory) docs_sites_url_dicts = context.get_docs_sites_urls() docs_sites_strings = [ " - <cyan>{}</cyan>: {}".format( docs_site_dict["site_name"], docs_site_dict.get("site_url") or f"site configured but does not exist. Run the following command to build site: great_expectations " f'docs build --site-name {docs_site_dict["site_name"]}', ) for docs_site_dict in docs_sites_url_dicts ] if len(docs_sites_strings) == 0: cli_message("No Data Docs sites found") else: list_intro_string = _build_intro_string(docs_sites_strings) cli_message_list(docs_sites_strings, list_intro_string) toolkit.send_usage_message( data_context=context, event="cli.docs.list", success=True )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def find_all(client):\n return list(map(lambda s: Site(s), client.get_api_resource(\"self/sites\")))", "def list_sites():\n sites = db_client.query_sites_list()\n return jsonify(sites)", "def list_sites():\n result = []\n querystring = 'select sitename from {};'.format(TABLES[0]))\n res =...
[ "0.68871546", "0.6885869", "0.66649675", "0.6592278", "0.6572376", "0.6511911", "0.65085405", "0.6494182", "0.6483239", "0.64800274", "0.6446842", "0.64075714", "0.63722503", "0.63695705", "0.63695705", "0.62545717", "0.6170089", "0.61219245", "0.60714644", "0.6027309", "0.59...
0.6646616
3
Build documentation in a context
def build_docs(context, site_name=None, view=True, assume_yes=False): logger.debug("Starting cli.datasource.build_docs") if site_name is not None: site_names = [site_name] else: site_names = None index_page_locator_infos = context.build_data_docs( site_names=site_names, dry_run=True ) msg = "\nThe following Data Docs sites will be built:\n\n" for site_name, index_page_locator_info in index_page_locator_infos.items(): msg += " - <cyan>{}:</cyan> ".format(site_name) msg += "{}\n".format(index_page_locator_info) cli_message(msg) if not assume_yes: toolkit.confirm_proceed_or_exit() cli_message("\nBuilding Data Docs...\n") context.build_data_docs(site_names=site_names) cli_message("Done building Data Docs") if view: context.open_data_docs(site_name=site_name, only_if_exists=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sphinxify(docstring, context, buildername='html'):\n\n srcdir = mkdtemp()\n srcdir = encoding.to_unicode_from_fs(srcdir)\n\n base_name = osp.join(srcdir, 'docstring')\n rst_name = base_name + '.rst'\n\n if buildername == 'html':\n suffix = '.html'\n else:\n suffix = '.txt'\n ...
[ "0.7200595", "0.71286154", "0.70953643", "0.69826937", "0.67844534", "0.6753231", "0.6731056", "0.67097837", "0.6688072", "0.6656818", "0.65566325", "0.65320677", "0.6452669", "0.6432759", "0.6430928", "0.6357106", "0.63557726", "0.631642", "0.63006896", "0.62424266", "0.6237...
0.5954264
42
Normalize the batch data, use coordinates of the block centered at origin,
def normalize_data(batch_data): B, N, C = batch_data.shape normal_data = np.zeros((B, N, C)) for b in range(B): pc = batch_data[b] centroid = np.mean(pc, axis=0) pc = pc - centroid m = np.max(np.sqrt(np.sum(pc ** 2, axis=1))) pc = pc / m normal_data[b] = pc return normal_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _normalize(self, dataset):\n if self.max is None: # if we are normalizing the training set\n self.max, self.min = dataset.max(), dataset.min() # find max, min value for each columns\n for row in dataset.index: # for each row in dataset\n for c...
[ "0.69384634", "0.68707067", "0.6639805", "0.6637574", "0.65870476", "0.647955", "0.644119", "0.63818425", "0.6363857", "0.626459", "0.62471074", "0.62398934", "0.62313163", "0.6193541", "0.6191717", "0.61898583", "0.618904", "0.61679614", "0.6146879", "0.61388755", "0.6138875...
0.7281029
0
Shuffle data and labels.
def shuffle_data(data, labels): idx = np.arange(len(labels)) np.random.shuffle(idx) return data[idx, ...], labels[idx], idx
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def shuffle_data(self):\n images = list(self.train_images)\n labels = list(self.train_labels)\n self.train_images = []\n self.train_labels = []\n\n # create list of permutated index and shuffle data accoding to list\n idx = np.random.permutation(len(labels))\n for i...
[ "0.84921646", "0.84220874", "0.800393", "0.7868807", "0.78123", "0.7764462", "0.7758387", "0.76847744", "0.7454648", "0.74520284", "0.7394949", "0.7394949", "0.73529667", "0.72944164", "0.7293919", "0.7201344", "0.7141838", "0.70985335", "0.7062494", "0.70317477", "0.69610476...
0.77576107
9
Shuffle orders of points in each point cloud changes FPS behavior. Use the same shuffling idx for the entire batch.
def shuffle_points(batch_data): idx = np.arange(batch_data.shape[1]) np.random.shuffle(idx) return batch_data[:,idx,:]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def shuffle_points(batch_data):\n idx = np.arange(batch_data.shape[1])\n np.random.shuffle(idx)\n return batch_data[:, idx, :]", "def shuffle_points(mutated_genome,index):\n random.shuffle(mutated_genome[index][2])", "def _shuffle_roidb_idx(self):\n self.perm = np.random.permutation(np.arange...
[ "0.72801876", "0.72373664", "0.6753397", "0.6724248", "0.65495604", "0.6484776", "0.6474384", "0.6474384", "0.6457585", "0.64205366", "0.6403728", "0.6389177", "0.63853645", "0.6350583", "0.6297481", "0.62960184", "0.62893575", "0.62821025", "0.6237439", "0.62186027", "0.6217...
0.725899
1
Randomly rotate the point clouds to augument the dataset rotation is per shape based along up direction
def rotate_point_cloud(batch_data): rotated_data = np.zeros(batch_data.shape, dtype=np.float32) for k in range(batch_data.shape[0]): rotation_angle = np.random.uniform() * 2 * np.pi cosval = np.cos(rotation_angle) sinval = np.sin(rotation_angle) rotation_matrix = np.array([[cosval, 0, sinval], [0, 1, 0], [-sinval, 0, cosval]]) shape_pc = batch_data[k, ...] rotated_data[k, ...] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix) return rotated_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rotate_point_cloud(data):\n rotated_data = np.zeros(data.shape, dtype=np.float32)\n for k in xrange(data.shape[0]):\n rotation_angle = np.random.uniform() * 2 * np.pi\n cosval = np.cos(rotation_angle)\n sinval = np.sin(rotation_angle)\n rotation_matrix ...
[ "0.73846465", "0.6965289", "0.6953168", "0.6815348", "0.6815157", "0.6815157", "0.6747695", "0.6692255", "0.66423446", "0.6594938", "0.6594938", "0.6594938", "0.6522628", "0.64682126", "0.6416248", "0.6369285", "0.61995316", "0.6168288", "0.6102223", "0.5960313", "0.5927957",...
0.6925797
5
Randomly rotate the point clouds to augument the dataset rotation is per shape based along up direction
def rotate_point_cloud_z(batch_data): rotated_data = np.zeros(batch_data.shape, dtype=np.float32) for k in range(batch_data.shape[0]): rotation_angle = np.random.uniform() * 2 * np.pi cosval = np.cos(rotation_angle) sinval = np.sin(rotation_angle) rotation_matrix = np.array([[cosval, sinval, 0], [-sinval, cosval, 0], [0, 0, 1]]) shape_pc = batch_data[k, ...] rotated_data[k, ...] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix) return rotated_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rotate_point_cloud(data):\n rotated_data = np.zeros(data.shape, dtype=np.float32)\n for k in xrange(data.shape[0]):\n rotation_angle = np.random.uniform() * 2 * np.pi\n cosval = np.cos(rotation_angle)\n sinval = np.sin(rotation_angle)\n rotation_matrix ...
[ "0.7385551", "0.69660366", "0.69539493", "0.69265366", "0.69265366", "0.69265366", "0.6815256", "0.67491716", "0.6693226", "0.66440564", "0.65964067", "0.65964067", "0.65964067", "0.65239024", "0.6469736", "0.64177275", "0.63704824", "0.6198356", "0.61689585", "0.6100769", "0...
0.6815286
7
Randomly perturb the point clouds by small rotations
def rotate_perturbation_point_cloud_with_normal(batch_data, angle_sigma=0.06, angle_clip=0.18): rotated_data = np.zeros(batch_data.shape, dtype=np.float32) for k in range(batch_data.shape[0]): angles = np.clip(angle_sigma*np.random.randn(3), -angle_clip, angle_clip) Rx = np.array([[1,0,0], [0,np.cos(angles[0]),-np.sin(angles[0])], [0,np.sin(angles[0]),np.cos(angles[0])]]) Ry = np.array([[np.cos(angles[1]),0,np.sin(angles[1])], [0,1,0], [-np.sin(angles[1]),0,np.cos(angles[1])]]) Rz = np.array([[np.cos(angles[2]),-np.sin(angles[2]),0], [np.sin(angles[2]),np.cos(angles[2]),0], [0,0,1]]) R = np.dot(Rz, np.dot(Ry,Rx)) shape_pc = batch_data[k,:,0:3] shape_normal = batch_data[k,:,3:6] rotated_data[k,:,0:3] = np.dot(shape_pc.reshape((-1, 3)), R) rotated_data[k,:,3:6] = np.dot(shape_normal.reshape((-1, 3)), R) return rotated_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _sample_epislon(self, cur_y, cur_z):\n old_loglik = self._loglik(cur_y, cur_z)\n old_epislon = self.epislon\n \n # modify the feature ownership matrix\n self.epislon = np.random.beta(1,1)\n new_loglik = self._loglik(cur_y, cur_z)\n move_prob = 1 / (1 + np.exp(old_lo...
[ "0.6232219", "0.61596", "0.6088852", "0.6088638", "0.60208404", "0.59796554", "0.5843116", "0.5838049", "0.5800207", "0.5765245", "0.57596815", "0.5757105", "0.5742337", "0.57243687", "0.5712514", "0.57068694", "0.5703828", "0.5692578", "0.5684955", "0.5684955", "0.5684955", ...
0.57714236
9
Rotate the point cloud along up direction with certain angle.
def rotate_point_cloud_by_angle(batch_data, rotation_angle): rotated_data = np.zeros(batch_data.shape, dtype=np.float32) for k in range(batch_data.shape[0]): #rotation_angle = np.random.uniform() * 2 * np.pi cosval = np.cos(rotation_angle) sinval = np.sin(rotation_angle) rotation_matrix = np.array([[cosval, 0, sinval], [0, 1, 0], [-sinval, 0, cosval]]) shape_pc = batch_data[k,:,0:3] rotated_data[k,:,0:3] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix) return rotated_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rotateZUp(self):\n MV = self.MV\n MV[:3, 1] = 0, 0, 1 # 2nd col is up vector, make it point along z axis\n # set bottom left and bottom right z values to zero:\n MV[2, 0] = 0\n MV[2, 2] = 0\n a = MV[0, 0] # grab top left value\n b = np.sqrt(1 - a**2) # calc new ...
[ "0.6844331", "0.6844331", "0.6701853", "0.64968896", "0.6489468", "0.63358444", "0.63339704", "0.6315883", "0.6225028", "0.6200144", "0.6192065", "0.6183062", "0.6182096", "0.6172054", "0.6166697", "0.6152675", "0.6152675", "0.6152675", "0.6152675", "0.6152249", "0.6143772", ...
0.57263416
58
Rotate the point cloud along up direction with certain angle.
def rotate_point_cloud_by_angle_with_normal(batch_data, rotation_angle): rotated_data = np.zeros(batch_data.shape, dtype=np.float32) for k in range(batch_data.shape[0]): #rotation_angle = np.random.uniform() * 2 * np.pi cosval = np.cos(rotation_angle) sinval = np.sin(rotation_angle) rotation_matrix = np.array([[cosval, 0, sinval], [0, 1, 0], [-sinval, 0, cosval]]) # rotate along y axis shape_pc = batch_data[k,:,0:3] shape_normal = batch_data[k,:,3:6] rotated_data[k,:,0:3] = np.dot(shape_pc.reshape((-1, 3)), rotation_matrix) rotated_data[k,:,3:6] = np.dot(shape_normal.reshape((-1,3)), rotation_matrix) return rotated_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rotateZUp(self):\n MV = self.MV\n MV[:3, 1] = 0, 0, 1 # 2nd col is up vector, make it point along z axis\n # set bottom left and bottom right z values to zero:\n MV[2, 0] = 0\n MV[2, 2] = 0\n a = MV[0, 0] # grab top left value\n b = np.sqrt(1 - a**2) # calc new ...
[ "0.6844139", "0.6844139", "0.6701813", "0.6496726", "0.6489327", "0.63356435", "0.63336676", "0.63157606", "0.6224757", "0.6200055", "0.6191932", "0.6182845", "0.6181952", "0.6171766", "0.6166754", "0.6152536", "0.6152536", "0.6152536", "0.6152536", "0.6152011", "0.6143452", ...
0.0
-1
Randomly perturb the point clouds by small rotations
def rotate_perturbation_point_cloud(batch_data, angle_sigma=0.06, angle_clip=0.18): rotated_data = np.zeros(batch_data.shape, dtype=np.float32) for k in range(batch_data.shape[0]): angles = np.clip(angle_sigma*np.random.randn(3), -angle_clip, angle_clip) Rx = np.array([[1,0,0], [0,np.cos(angles[0]),-np.sin(angles[0])], [0,np.sin(angles[0]),np.cos(angles[0])]]) Ry = np.array([[np.cos(angles[1]),0,np.sin(angles[1])], [0,1,0], [-np.sin(angles[1]),0,np.cos(angles[1])]]) Rz = np.array([[np.cos(angles[2]),-np.sin(angles[2]),0], [np.sin(angles[2]),np.cos(angles[2]),0], [0,0,1]]) R = np.dot(Rz, np.dot(Ry,Rx)) shape_pc = batch_data[k, ...] rotated_data[k, ...] = np.dot(shape_pc.reshape((-1, 3)), R) return rotated_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _sample_epislon(self, cur_y, cur_z):\n old_loglik = self._loglik(cur_y, cur_z)\n old_epislon = self.epislon\n \n # modify the feature ownership matrix\n self.epislon = np.random.beta(1,1)\n new_loglik = self._loglik(cur_y, cur_z)\n move_prob = 1 / (1 + np.exp(old_lo...
[ "0.6232219", "0.61596", "0.6088852", "0.6088638", "0.60208404", "0.59796554", "0.5843116", "0.5838049", "0.5800207", "0.57714236", "0.5765245", "0.57596815", "0.5742337", "0.57243687", "0.5712514", "0.57068694", "0.5703828", "0.5692578", "0.5684955", "0.5684955", "0.5684955",...
0.5757105
12
Randomly jitter points. jittering is per point.
def jitter_point_cloud(batch_data, sigma=0.01, clip=0.05): B, N, C = batch_data.shape assert(clip > 0) jittered_data = np.clip(sigma * np.random.randn(B, N, C), -1*clip, clip) jittered_data += batch_data return jittered_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def jitter(interval):\n\treturn interval * (0.9 + 0.2 * random.random())", "def rand_int_jitter():\n return random.randint(0, 10)", "def jitter(x, y): \r\n dx = max(x) - min(x)\r\n dy = max(y) - min(y)\r\n assert(dx > 0)\r\n assert(dy > 0)\r\n d = math.sqrt(float(dx**2 + dy**2))\r\n print...
[ "0.73470956", "0.72041214", "0.71101594", "0.70101404", "0.67910165", "0.6369184", "0.6235321", "0.6233606", "0.6164337", "0.6122401", "0.6082923", "0.6006233", "0.5953184", "0.595242", "0.5943489", "0.59328365", "0.5924084", "0.5902909", "0.5856639", "0.58350176", "0.5834869...
0.5786313
25
Randomly shift point cloud. Shift is per point cloud.
def shift_point_cloud(batch_data, shift_range=0.1): B, N, C = batch_data.shape shifts = np.random.uniform(-shift_range, shift_range, (B,3)) for batch_index in range(B): batch_data[batch_index,:,:] += shifts[batch_index,:] return batch_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def shift(self):\n \"\"\"\n shift cluster randomly within bounds of im\n \"\"\"\n r = self.std\n mid = self.mid_pixel #center pixel index of 384x384 image\n delta = self.im_size - self.mid_pixel - r - 10\n \n x = np.random.randint(low=-1*delta,high=delta,size...
[ "0.72441024", "0.6811636", "0.64971095", "0.61066484", "0.60589975", "0.5939156", "0.593459", "0.5932551", "0.5909121", "0.57994634", "0.57626474", "0.5751564", "0.57320094", "0.57160926", "0.5685124", "0.56692094", "0.56554824", "0.564319", "0.56338567", "0.5614417", "0.5607...
0.7352118
0
Randomly scale the point cloud. Scale is per point cloud.
def random_scale_point_cloud(batch_data, scale_low=0.8, scale_high=1.25): B, N, C = batch_data.shape scales = np.random.uniform(scale_low, scale_high, B) for batch_index in range(B): batch_data[batch_index,:,:] *= scales[batch_index] return batch_data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def scale_point_cloud(batch_data, scale_ratio=0.8):\n B, N, C = batch_data.shape\n scale = np.random.uniform(scale_ratio,1/scale_ratio,(B,1,1))\n scaled_data = batch_data*scale\n return scaled_data", "def ScalePoints(points, sigma = 0.02):\n assert(points.shape[1]==3)\n\n scale = np.random.unif...
[ "0.76311445", "0.6895509", "0.67543155", "0.64973104", "0.63981426", "0.62324494", "0.6162519", "0.6148883", "0.6085778", "0.60736156", "0.59713405", "0.5954016", "0.5944525", "0.5929756", "0.58707345", "0.5869925", "0.58671856", "0.58244807", "0.5817822", "0.57851666", "0.57...
0.76930815
0
Shows message on application status bar
def show_status(self, status): self.statusBar().showMessage(status, 2000)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def showStatus(self, message):\n self.status_bar.showMessage(message)", "def statusbar_msg(self, msg):\n self.statusbar.clearMessage()\n self.statusbar.showMessage(msg)", "def display_message(self, message):\n context_id = self.status_bar.get_context_id(\"\")\n self.status_ba...
[ "0.84011865", "0.833993", "0.8174036", "0.77206886", "0.76677835", "0.75396955", "0.75219834", "0.75037515", "0.7410699", "0.73903996", "0.7333679", "0.72647804", "0.7226942", "0.70724267", "0.7072069", "0.7037532", "0.70224166", "0.69146335", "0.6912863", "0.6885066", "0.681...
0.81817955
2
Override the closeEvent method
def closeEvent(self, event): if settings.IN_SYNC: event.ignore() QtGui.QMessageBox.information(self, "Abort", "You are in the middle of " + "a conflicted merge. Please resolve it first.") else: event.accept()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def outCloseEvent(self):\r\n pass", "def onClose(self, event):\n pass", "def onClose(self, event):\n pass", "def onClose(self, event):\n pass", "def closeEvent(self, event):\n self.exit()\n event.accept()", "def closeEvent(self, event):\n\n sys.exit()", "def c...
[ "0.833176", "0.82620287", "0.82620287", "0.82620287", "0.80923194", "0.8072515", "0.80208814", "0.80071384", "0.8003962", "0.7980135", "0.7966298", "0.78825104", "0.78405327", "0.784024", "0.78278977", "0.77431095", "0.7715871", "0.77045715", "0.77030706", "0.77030706", "0.76...
0.0
-1
Gets valid user credentials from storage. If nothing has been stored, or if the stored credentials are invalid, the OAuth2 flow is completed to obtain the new credentials.
def get_credentials(data_dir_path, client_secret_file_path, scopes, application_name): #home_dir = os.path.expanduser('~') #credential_dir = os.path.join(home_dir, '.credentials') credential_dir = os.path.join(data_dir_path, ".credentials") if not os.path.exists(credential_dir): os.makedirs(credential_dir) credential_path = os.path.join(credential_dir, 'gmail-python-quickstart.json') store = Storage(credential_path) credentials = store.get() if not credentials or credentials.invalid: flow = client.flow_from_clientsecrets(client_secret_file_path, scopes) flow.user_agent = application_name try: import argparse flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args() except ImportError: flags = None if flags: credentials = tools.run_flow(flow, store, flags) else: # Needed only for compatibility with Python 2.6 credentials = tools.run(flow, store) print('Storing credentials to ' + credential_path) return credentials
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _load_user_credentials(self, storage):\n # Set up a Flow object to be used if we need to authenticate.\n flow = client.flow_from_clientsecrets(\n self.client_secrets,\n scope=self.api_scopes,\n message=tools.message_if_missing(self.client_secrets))\n\n # Re...
[ "0.7571535", "0.7355913", "0.71540344", "0.7120326", "0.70709026", "0.7026004", "0.6991126", "0.69621813", "0.6947445", "0.69403243", "0.69352114", "0.69063586", "0.68998873", "0.6899442", "0.6857314", "0.6828924", "0.679187", "0.6784848", "0.677493", "0.677493", "0.677493", ...
0.6046647
96
return list of id of unread emails
def get_unread_email_ids(gmail_client): response = gmail_client.users().messages().list(userId='me',q='is:unread').execute() if 'messages' in response: # messages key only exists if there are unread messages return [message['id'] for message in response['messages']] else: print("No unread messages...") return [] # still return a list since that's what caller expects
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_unread_emails(self):\n try:\n query = 'is:unread from:scholaralerts-noreply@google.com'\n page_token = None\n p_emails = []\n while True:\n request = self.service.users().messages().list(userId='me',\n ...
[ "0.7194147", "0.70722127", "0.70129013", "0.69660854", "0.6963227", "0.6954459", "0.69394183", "0.69240445", "0.691629", "0.68746626", "0.68267304", "0.67761564", "0.67230296", "0.6694524", "0.6601468", "0.6559419", "0.6427694", "0.6418745", "0.6417917", "0.63268363", "0.6284...
0.8231231
0
Use gmail api to find new emails Return email addresses of new emails and whether or not they have postcard
def get_unread_email_data(gmail_client): unread_ids = get_unread_email_ids(gmail_client) for message_id in unread_ids: remove_unread_label = {'removeLabelIds': ['UNREAD']} gmail_client.users().messages().modify(userId='me', id=message_id, body=remove_unread_label).execute() message_data = gmail_client.users().messages().get(userId='me',id=message_id).execute() message_payload = message_data['payload'] has_attachment = 0 < len([part for part in message_payload['parts'] if part['mimeType'] == 'image/jpeg']) message_headers = message_payload['headers'] sender = [header['value'] for header in message_headers if header['name'] == 'Return-Path'][0] yield sender, has_attachment
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_emails():\n\n # generate the gmail api service\n service = build_gmail_api_v1()\n\n # compute date for one year ago\n today = date.today()\n one_year_ago = today - timedelta(days=365.25)\n start = one_year_ago - timedelta(days=1)\n end = one_year_ago + timedelta(days=1)\n start_stri...
[ "0.6836338", "0.68140227", "0.6734633", "0.6701311", "0.6606506", "0.6500365", "0.649841", "0.6341374", "0.6327239", "0.6249059", "0.62106127", "0.61267984", "0.61160463", "0.60011953", "0.59610665", "0.59563863", "0.5922605", "0.5915454", "0.5888645", "0.586136", "0.58241683...
0.5653958
27
Create a message for an email.
def create_message(sender, to, subject, message_text): message = MIMEText(message_text) message['to'] = to message['from'] = sender message['subject'] = subject raw = base64.urlsafe_b64encode(message.as_bytes()) return {'raw':raw.decode()}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def createMessage( self, *args, **kw ):\n return MailMessage( *args, **kw )", "def createMessage( self, *args, **kw ):\n if not kw.has_key('charset'):\n kw['charset'] = self.getOutputCharset()\n kw['to_mail'] = 1\n return MailServerBase.createMessage( self, *args, **kw )", ...
[ "0.8220897", "0.783499", "0.7542338", "0.7535529", "0.7519684", "0.74869245", "0.7417828", "0.73909754", "0.7373257", "0.7355638", "0.73106784", "0.7269137", "0.719846", "0.7196095", "0.7140178", "0.7047853", "0.7021525", "0.70037013", "0.70037013", "0.68474555", "0.68418366"...
0.7297086
11
Send an email message.
def send_message(message, client):
    """Send an email message through the Gmail API (best effort).

    Args:
        message: dict with a 'raw' body, e.g. from create_message().
        client: authorized Gmail API service object.

    Returns:
        The sent-message resource on success; None when the API call
        raises an HttpError (the error is printed, not re-raised).
    """
    try:
        sent = (client.users().messages()
                .send(userId='me', body=message).execute())
        print('Message Id: %s' % sent['id'])
        return sent
    except errors.HttpError as error:
        # Deliberately swallowed: sending is best-effort here.
        print('An error occurred: %s' % error)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def send_email(self, message):\n pass", "def send_mail(email):\n return email.send()", "def send_email(msg):\n\tprint(\"sendEmail: \" + msg)", "def send_email(message):\n mail_server = smtplib.SMTP('localhost')\n mail_server.send_message(message)\n mail_server.quit()", "def send_email(msg):\n ...
[ "0.8239757", "0.80890095", "0.79037267", "0.78920776", "0.7768939", "0.7730623", "0.7727504", "0.76961035", "0.76466435", "0.7635667", "0.7613354", "0.7608988", "0.7607686", "0.7554999", "0.7533295", "0.7488125", "0.74320126", "0.74103194", "0.7409468", "0.7407818", "0.740765...
0.0
-1
Makes a directory after checking whether it already exists.
def mkdir(directory, parents=True):
    """Create *directory* unless it already exists.

    Args:
        directory: path (str or Path) of the directory to create.
        parents: when True, also create any missing parent directories.
    """
    path_dir = Path(directory)
    if not path_dir.exists():
        # exist_ok=True closes the race where another process creates
        # the directory between the exists() check and mkdir().
        path_dir.mkdir(parents=parents, exist_ok=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_dir(self):\n if not os.path.exists(self.d):\n try:\n os.mkdir(self.d)\n except OSError, e:\n if e.errno != 17:\n raise\n pass", "def new_dir(the_dir):\n try:\n os.makedirs(the_dir)\n except OSError ...
[ "0.83006763", "0.81420314", "0.80905235", "0.7991284", "0.7971449", "0.7967864", "0.79364747", "0.79364747", "0.7920723", "0.7886012", "0.78693926", "0.78422606", "0.7834246", "0.7830855", "0.78147686", "0.7812152", "0.7812152", "0.7812152", "0.7812152", "0.78007835", "0.7798...
0.0
-1
Removes an empty directory after checking whether it already exists.
def rmdir(directory, recursive=False):
    """Remove *directory* if it exists; no-op when it does not.

    Args:
        directory: path (str or Path) of the directory to remove.
        recursive: when True, delete the directory and all contents.

    Raises:
        ValueError: directory is non-empty and recursive is False.
    """
    path_dir = Path(directory)
    if not path_dir.exists():
        return
    if recursive:
        shutil.rmtree(path_dir)
        return
    # any() stops at the first entry instead of materializing the
    # whole listing just to take its length.
    if any(path_dir.iterdir()):
        raise ValueError(
            "Cannot remove directory '{}' as it is not empty: consider removing it recursively".format(path_dir))
    path_dir.rmdir()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def remove_empty_path(self, empty):\n while True:\n if not os.path.isdir(empty):\n\n # Directory does not exist\n return\n\n if os.listdir(empty):\n # Directory is not empty\n return\n\n try:\n os.rmd...
[ "0.8056177", "0.78796035", "0.7780004", "0.7707977", "0.7627978", "0.7590993", "0.75686836", "0.75058484", "0.7489895", "0.7464971", "0.74595416", "0.7390957", "0.7353166", "0.7352782", "0.7333176", "0.7274257", "0.725936", "0.7209765", "0.7153305", "0.71452975", "0.71025324"...
0.0
-1
Create a k cluster data set with required separation. For the purposes of validating a proof, generate each cluster center such that it is at least 4 delta away from any other cluster for some value of delta > 0.
def gen_k_centers(k, dim):
    """Generate k cluster centers with a random separation scale delta.

    The first center is a standard-normal draw; the i-th subsequent
    center is placed 2*i*delta from the first along a random direction
    (eps is added to every coordinate -- presumably to avoid exact
    boundary ties; confirm against the calling proof).

    Returns:
        (centers, delta): list of dim-dimensional numpy vectors and
        the positive separation scale delta.
    """
    delta = abs(np.random.normal(0.0, 5.0))
    eps = 0.001
    centers = []
    for i in range(k):
        # Drawn every iteration (even when unused) to keep the RNG
        # stream identical for every center index.
        candidate = np.random.multivariate_normal(np.zeros(dim), np.identity(dim))
        if not centers:
            centers.append(candidate)
            continue
        anchor = centers[0]
        offset = np.random.multivariate_normal(anchor, np.identity(anchor.size)) - anchor
        unit = offset / np.linalg.norm(offset)
        centers.append(anchor + 2.0 * i * delta * unit + eps)
    return centers, delta
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def create_clusters(self):\n ex = 0\n print 'Iter - Purity Gini Index'\n while ex < self.MAX_ITERATION:\n new_clusters = np.zeros(self.centroids.shape)\n distances = euclidean_distances(self.vectors, self.centroids).argmin(axis=1)\n for i in ran...
[ "0.7273148", "0.7161901", "0.71461064", "0.70277727", "0.6989481", "0.6961698", "0.69509083", "0.6936706", "0.69362247", "0.68879706", "0.68621993", "0.67435825", "0.6723467", "0.67071426", "0.67059225", "0.66942155", "0.66667116", "0.6627539", "0.6592454", "0.6569137", "0.65...
0.73497814
0
Create a delta separated data set. Generate a set of centers for the clusters and from each center draw size number of points that constitute the points in that cluster. Then return a dataset of all points.
def create_dataset(dims, size, num_clusters=20):
    """Create a delta-separated data set.

    Draws num_clusters cluster centers via gen_k_centers, then samples
    *size* points around each center, returning the combined dataset.

    Args:
        dims: dimensionality of each point.
        size: number of points drawn per cluster.
        num_clusters: number of cluster centers (default 20).
    """
    centers, separation = gen_k_centers(num_clusters, dims)
    return _create_constrained_dataset(centers, separation, size)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _create_constrained_dataset(centers, delta, size):\n dataset = []\n count = 0\n for i, c in enumerate(centers):\n for j in range(size):\n x = np.random.multivariate_normal(c, np.identity(np.size(c))) - c\n direction = x / np.linalg.norm(x)\n magnitude = np.rando...
[ "0.75692487", "0.6472027", "0.6142992", "0.61193955", "0.61097693", "0.6057844", "0.6014714", "0.5996427", "0.59905845", "0.5968957", "0.5963011", "0.5947673", "0.5943788", "0.5909058", "0.5908519", "0.5902958", "0.5898489", "0.5893198", "0.58916074", "0.58719695", "0.5860178...
0.6139342
3
Create a delta-separated dataset. For each of the centers draw size number of points. No two points may be farther than delta away from each other. Thus, to generate each point, choose a random direction and random distance from the center (of up to 0.5 delta).
def _create_constrained_dataset(centers, delta, size): dataset = [] count = 0 for i, c in enumerate(centers): for j in range(size): x = np.random.multivariate_normal(c, np.identity(np.size(c))) - c direction = x / np.linalg.norm(x) magnitude = np.random.uniform(0.0, 0.5 * delta) # magnitude = np.random.uniform(0.0, delta) # NOT DEL-SEPARATED dataset.append((c + magnitude * direction, i, count)) count += 1 return dataset
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_point_cloud(n:int, d:int = 2, seed=1234) -> np.ndarray:\n initial_seed = np.random.get_state()\n np.random.seed(seed)\n points = np.random.rand(n, d)\n np.random.set_state(initial_seed)\n return points", "def generate(self, n, d, seperation=4.0, pos_fraction=0.5):\n self.n = n\...
[ "0.6200814", "0.60125065", "0.6008298", "0.58964354", "0.5652766", "0.5627772", "0.56074226", "0.55708045", "0.55676484", "0.54426163", "0.54371774", "0.5432757", "0.54286873", "0.5412957", "0.53979236", "0.53931975", "0.53886104", "0.5388139", "0.5384613", "0.53822875", "0.5...
0.70498824
0
Create 4 cluster centers. Create gaussians centered at (1,1), (1,-1), (-1,-1) and (-1,1). Each has standard covariance.
def gen_4_normal():
    """Create 4 cluster centers at the corners of the unit square.

    Returns unit-covariance gaussians centered at (1,1), (1,-1),
    (-1,-1) and (-1,1), in that order.
    """
    eye2 = np.identity(2)
    corners = [(1.0, 1.0), (1.0, -1.0), (-1.0, -1.0), (-1.0, 1.0)]
    return [mn(mean=np.array(c), cov=eye2) for c in corners]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_centers(self):\n\t\tcenters = []\n\t\tsize = self.config.image_size\n\t\tfor i in range(self.config.num_obj):\n\t\t\tflag = True\n\t\t\twhile flag:\n\t\t\t\tc = np.random.randint(int(size * 0.05), int(size * 0.95), 2)\n\t\t\t\tflag = False\n\t\t\t\tfor center in centers:\n\t\t\t\t\tif (abs(center[0] -...
[ "0.6578101", "0.6553022", "0.6369997", "0.63132054", "0.62990874", "0.62756854", "0.62509024", "0.6190687", "0.6112884", "0.6097691", "0.60697997", "0.60543525", "0.5993539", "0.598463", "0.5970525", "0.5952007", "0.5925367", "0.5922735", "0.5904544", "0.590372", "0.590219", ...
0.5482319
70
Create 4 cluster centers. Create gaussians centered at (10,10), (10,-10), (-10,-10) and (-10,10). Each has standard covariance.
def _4_normal_spread(): return [mn(mean=np.array([10.0, 10.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])), mn(mean=np.array([10.0, -10.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])), mn(mean=np.array([-10.0, -10.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])), mn(mean=np.array([-10.0, 10.0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]]))]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def random_init(self, train_data):\n\n centroids=np.zeros((self.n_clusters_, train_data.shape[1]))\n for c in range(self.n_clusters_):\n for f in range(train_data.shape[1]):\n centroids[c,f]=random.uniform(min(train_data[:,f]), max(train_data[:,f]))\n\n return centroi...
[ "0.6637167", "0.6619889", "0.64092225", "0.6370468", "0.6316697", "0.6288295", "0.6286796", "0.6268664", "0.6214184", "0.61212265", "0.6098709", "0.60670376", "0.60576606", "0.6025386", "0.6019331", "0.60053873", "0.59957886", "0.5989434", "0.5931372", "0.5907089", "0.5906791...
0.0
-1
Create a 5x5 grid of cluster centers. Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a gaussian with standard covariance
def _5x5_grid_clusters(): return [mn(mean=np.array([i, j]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])) for i in range(5) for j in range(5)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _5x5_grid_clusters_spread():\n return [mn(mean=np.array([i * 25, j * 25]), cov=np.array([[1.0, 0.0],\n [0.0, 1.0]]))\n for i in range(5)\n for j in range(5)]", "def _10x10_grid_clusters_spread():\n return [mn(mean=np.array([i * 25...
[ "0.76212066", "0.7217054", "0.71860254", "0.69142705", "0.68518704", "0.68461156", "0.66278195", "0.66259587", "0.6578559", "0.63746804", "0.63681006", "0.63251126", "0.6259267", "0.62513924", "0.6211149", "0.62032765", "0.61599225", "0.6159027", "0.6151934", "0.6151934", "0....
0.8075378
0
Create a 5x5 grid of cluster centers. Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a gaussian with standard covariance
def _5x5_grid_clusters_spread(): return [mn(mean=np.array([i * 25, j * 25]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])) for i in range(5) for j in range(5)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _5x5_grid_clusters():\n return [mn(mean=np.array([i, j]), cov=np.array([[1.0, 0.0],\n [0.0, 1.0]]))\n for i in range(5)\n for j in range(5)]", "def _10x10_grid_clusters_spread():\n return [mn(mean=np.array([i * 25, j * 25]), cov=np.array([[...
[ "0.8075657", "0.7216367", "0.7186302", "0.691356", "0.68508494", "0.6845249", "0.66261995", "0.6625447", "0.6577691", "0.6374363", "0.63676125", "0.6322932", "0.625821", "0.6250707", "0.6211498", "0.6203698", "0.6160035", "0.6157637", "0.6151316", "0.6151316", "0.6094953", ...
0.762155
1
Create a 5x5 grid of cluster centers. Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a gaussian with standard covariance
def _5x5_grid_clusters_close(): return [mn(mean=np.array([i * 5, j * 5]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])) for i in range(5) for j in range(5)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _5x5_grid_clusters():\n return [mn(mean=np.array([i, j]), cov=np.array([[1.0, 0.0],\n [0.0, 1.0]]))\n for i in range(5)\n for j in range(5)]", "def _5x5_grid_clusters_spread():\n return [mn(mean=np.array([i * 25, j * 25]), cov=np.array([[1....
[ "0.80750877", "0.7620803", "0.72162825", "0.69135714", "0.6852057", "0.6845415", "0.66279685", "0.6627276", "0.65779185", "0.6375827", "0.6368961", "0.6324704", "0.62609804", "0.62517524", "0.6210675", "0.6203783", "0.6160406", "0.61593336", "0.6152343", "0.6152343", "0.60966...
0.7185599
3
Create a 3x3 grid of cluster centers. Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a gaussian with standard covariance
def _2x3_grid_clusters_close(): return [mn(mean=np.array([i * 5, j * 5]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])) for i in range(2) for j in range(3)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _5x5_grid_clusters():\n return [mn(mean=np.array([i, j]), cov=np.array([[1.0, 0.0],\n [0.0, 1.0]]))\n for i in range(5)\n for j in range(5)]", "def _5x5_grid_clusters_spread():\n return [mn(mean=np.array([i * 25, j * 25]), cov=np.array([[1....
[ "0.7743275", "0.7348963", "0.7345636", "0.72138846", "0.693622", "0.6857599", "0.678586", "0.6746427", "0.6744042", "0.6541813", "0.6501836", "0.65014625", "0.6451203", "0.631673", "0.63058984", "0.6281105", "0.6236732", "0.6231005", "0.62132615", "0.62098527", "0.6208093", ...
0.677395
7
Create a 3x3 grid of cluster centers. Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a gaussian with standard covariance
def _2x3_grid_clusters_spread(): return [mn(mean=np.array([i * 25, j * 25]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])) for i in range(2) for j in range(3)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _5x5_grid_clusters():\n return [mn(mean=np.array([i, j]), cov=np.array([[1.0, 0.0],\n [0.0, 1.0]]))\n for i in range(5)\n for j in range(5)]", "def _5x5_grid_clusters_spread():\n return [mn(mean=np.array([i * 25, j * 25]), cov=np.array([[1....
[ "0.7741456", "0.7347127", "0.7343865", "0.69358754", "0.68556786", "0.6784003", "0.67725694", "0.67446214", "0.6744467", "0.6542266", "0.65019643", "0.6500571", "0.6449538", "0.6316034", "0.63062775", "0.6281654", "0.62352514", "0.62311894", "0.6211723", "0.6209921", "0.62071...
0.72126484
3
Create a 3x3 grid of cluster centers. Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a gaussian with standard covariance
def _10x10_grid_clusters_close(): return [mn(mean=np.array([i * 5, j * 5]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])) for i in range(10) for j in range(10)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _5x5_grid_clusters():\n return [mn(mean=np.array([i, j]), cov=np.array([[1.0, 0.0],\n [0.0, 1.0]]))\n for i in range(5)\n for j in range(5)]", "def _5x5_grid_clusters_spread():\n return [mn(mean=np.array([i * 25, j * 25]), cov=np.array([[1....
[ "0.7742221", "0.734794", "0.7344126", "0.7212588", "0.6936357", "0.678427", "0.67730224", "0.6745758", "0.67450356", "0.65422076", "0.65023965", "0.6501117", "0.6449754", "0.63159335", "0.6306894", "0.628159", "0.6236001", "0.6232418", "0.6210907", "0.62107736", "0.62073714",...
0.6856434
5
Create a 3x3 grid of cluster centers. Create 25 cluster centers on the grid I^{[0, 4] x [0,4]}. Each center is a gaussian with standard covariance
def _10x10_grid_clusters_spread(): return [mn(mean=np.array([i * 25, j * 25]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])) for i in range(10) for j in range(10)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _5x5_grid_clusters():\n return [mn(mean=np.array([i, j]), cov=np.array([[1.0, 0.0],\n [0.0, 1.0]]))\n for i in range(5)\n for j in range(5)]", "def _5x5_grid_clusters_spread():\n return [mn(mean=np.array([i * 25, j * 25]), cov=np.array([[1....
[ "0.7741881", "0.73474836", "0.7212278", "0.69355553", "0.68558514", "0.67842954", "0.67727154", "0.6745349", "0.67441016", "0.6541365", "0.65011424", "0.6499605", "0.6450051", "0.63159925", "0.6306321", "0.6281557", "0.6235609", "0.6231545", "0.62127733", "0.6210027", "0.6207...
0.7343511
2
Create random cluster centers. Create n cluster centers randomly. Each cluster center is a draw from a gaussian distribution centered at (0,0) with standard covariance.
def _random_standard_centers(n=100): generator = mn(mean=np.array([0, 0]), cov=np.array([[1.0, 0.0], [0.0, 1.0]])) return [mn(mean=pt, cov=np.array([[1.0, 0.0], [0.0, 1.0]])) for pt in generator.rvs(size=n)]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_sample_clusters(n_clusters, n_points, n_features=2, std=2, seed=1, limits=(-10, 10)):\n points_per_cluster = n_points // n_clusters\n np.random.seed(seed=seed)\n centroids = []\n for _ in range(n_features):\n centroids.append(np.random.randint(limits[0], limits[1], size=n_clusters))\n\n...
[ "0.76822317", "0.76259226", "0.73836875", "0.7335838", "0.7241687", "0.7094557", "0.7092686", "0.7072594", "0.7042424", "0.6994134", "0.6977371", "0.69499964", "0.6929814", "0.6919069", "0.6892269", "0.6817309", "0.6758312", "0.6726237", "0.6718225", "0.66974187", "0.6696951"...
0.7926905
0
Method that returns the JSON string representation of list_dictionaries
def to_json_string(list_dictionaries):
    """Return the JSON string representation of list_dictionaries.

    None serializes to the string "[]"; any other value is passed
    straight to json.dumps.
    """
    if list_dictionaries is None:
        return "[]"
    return json.dumps(list_dictionaries)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_json_string(list_dictionaries):\n if list_dictionaries is None:\n list_dictionaries = []\n return json.dumps(list_dictionaries)", "def to_json_string(list_dictionaries):\n if list_dictionaries is None:\n list_dictionaries = []\n return json.dumps(list_dict...
[ "0.841099", "0.841099", "0.8314405", "0.8295821", "0.8295821", "0.8267931", "0.8262547", "0.8249925", "0.82311714", "0.8218337", "0.8209053", "0.81681156", "0.8161104", "0.81400317", "0.8131461", "0.81231254", "0.8110287", "0.80916226", "0.74270463", "0.6780553", "0.67750645"...
0.81934065
11
Method that writes the JSON string representation of list_objs to a file
def save_to_file(cls, list_objs):
    """Write the JSON representation of list_objs to <ClassName>.json.

    None or an empty list produces a file containing "[]"; otherwise
    each object is serialized via its to_dictionary() method.
    """
    filename = cls.__name__ + ".json"
    dictionaries = []
    if list_objs:
        dictionaries = [obj.to_dictionary() for obj in list_objs]
    with open(filename, "w", encoding="utf-8") as file:
        file.write(cls.to_json_string(dictionaries))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def save_to_file(cls, list_objs):\n filename = cls.__name__ + \".json\"\n with open(filename, \"w\") as f:\n if list_objs is None:\n f.write(\"[]\")\n else:\n d = [x.to_dictionary() for x in list_objs]\n f.write(Base.to_json_string(d)...
[ "0.86339736", "0.8606109", "0.8603492", "0.85995936", "0.8557784", "0.8546807", "0.85401046", "0.8516957", "0.8511827", "0.84710205", "0.84403396", "0.8420791", "0.8380287", "0.8351859", "0.83487356", "0.8338624", "0.832877", "0.82504755", "0.824626", "0.8242384", "0.8223594"...
0.83550686
13
Method that returns the list represented by the JSON string json_string
def from_json_string(json_string):
    """Return the value encoded by the JSON string json_string.

    None and the empty string both decode to an empty list.
    """
    no_content = json_string is None or len(json_string) == 0
    return [] if no_content else json.loads(json_string)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def from_json_string(json_string):\n lis = []\n # not sure if empty means empty string or len(jstr) < 1\n if json_string is None or json_string == \"\":\n return lis\n return json.loads(json_string)", "def from_json_string(json_string):\n empty_list = []\n if ...
[ "0.7202782", "0.71640325", "0.71429116", "0.7090925", "0.70497406", "0.70338446", "0.6975703", "0.68670994", "0.6847883", "0.6829183", "0.6806034", "0.6722226", "0.66883564", "0.6621443", "0.66010094", "0.66010094", "0.6556371", "0.6529802", "0.648166", "0.64465785", "0.63312...
0.64909357
19
Method that returns an instance with all attributes already set
def create(cls, **dictionary):
    """Return a new *cls* instance with all attributes already set.

    A throwaway instance is built with placeholder dimensions (only
    Rectangle and Square are supported), then update(**dictionary)
    applies the real attribute values.
    """
    if cls.__name__ == "Rectangle":
        instance = cls(1, 1)
    elif cls.__name__ == "Square":
        instance = cls(1)
    instance.update(**dictionary)
    return instance
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def copy(self):\n def init(self, **kwargs):\n \"\"\"Initialization.\"\"\"\n for k, v in kwargs.items():\n setattr(self, k, v)\n\n return type(type(self).__name__, (type(self),), {\"__init__\": init})(**self.__dict__)", "def make(cls, **kwargs):\r\n return...
[ "0.6719594", "0.6588282", "0.6492425", "0.643093", "0.63870096", "0.638417", "0.638417", "0.6381096", "0.6337553", "0.63111466", "0.63078123", "0.6277938", "0.62500143", "0.6229394", "0.6193025", "0.6179197", "0.61522555", "0.6148272", "0.61313915", "0.6122863", "0.60904044",...
0.0
-1
Method that returns a list of instances
def load_from_file(cls):
    """Return a list of instances recreated from <ClassName>.json.

    Best effort by design: if the file is missing or anything fails
    while reading/parsing, whatever was recovered so far (possibly an
    empty list) is returned instead of raising.
    """
    filename = cls.__name__ + ".json"
    instances = []
    try:
        with open(filename, encoding="utf-8") as file:
            for attributes in cls.from_json_string(file.read()):
                instances.append(cls.create(**attributes))
        return instances
    except BaseException:
        # Equivalent to the original bare `except:` -- deliberate
        # swallow-everything so a broken file yields partial results.
        return instances
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_instances(cls):\n raise NotImplementedError", "def list_instances(self):\n # list instances\n self._list_instances()", "async def get_instances(self, **kwargs) -> List[ApiResource]:\n raw_resources = await self.get_resources(**kwargs)\n _instances = [\n sel...
[ "0.8240453", "0.77040386", "0.7198313", "0.7156317", "0.7027863", "0.70239186", "0.69569033", "0.69567513", "0.6942843", "0.6935414", "0.6921797", "0.6883705", "0.6883301", "0.6853721", "0.6853102", "0.6853102", "0.68430334", "0.68341786", "0.68341786", "0.68209326", "0.68209...
0.0
-1
Method to serialize in csv
def save_to_file_csv(cls, list_objs):
    """Serialize list_objs to <ClassName>.csv.

    Each object is flattened via to_dictionary() into a row whose
    column order is fixed per class; None or an empty list writes an
    empty CSV file.
    """
    fields_by_class = {
        "Rectangle": ["id", "width", "height", "x", "y"],
        "Square": ["id", "size", "x", "y"],
    }
    filename = cls.__name__ + ".csv"
    rows = []
    if list_objs:
        # Unknown class names fall back to an empty column list,
        # matching the original behavior (one empty row per object).
        fields = fields_by_class.get(cls.__name__, [])
        for obj in list_objs:
            attrs = obj.to_dictionary()
            rows.append([attrs[field] for field in fields])
    with open(filename, "w", encoding="utf-8") as file:
        csv.writer(file).writerows(rows)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_csv_string(self, **kwargs):\n ...", "def to_csv(self, csvwriter):\n csvwriter.writerow(self.to_csv_row())", "def toCsv(self, csv_path):\n ser = pd.Series(self)\n ser.to_csv(csv_path)", "def ToCsv(self):\n\n def csv_helper(the_dict, the_field):\n if the_field not ...
[ "0.73075175", "0.7227191", "0.7032894", "0.70270133", "0.6928391", "0.6888921", "0.6879478", "0.6835335", "0.67983305", "0.665103", "0.6593733", "0.6589045", "0.6541852", "0.6517649", "0.6497261", "0.6496066", "0.6471588", "0.64418876", "0.6415566", "0.6408056", "0.6371749", ...
0.0
-1